Dec 05 05:26:37 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 05:26:37 crc restorecon[4557]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:26:37 crc restorecon[4557]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc 
restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:26:37 crc 
restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc 
restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc 
restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 
crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 
05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 
05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:26:37 crc 
restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 05:26:37 crc restorecon[4557]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 05 05:26:37 crc kubenswrapper[4652]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 05:26:37 crc kubenswrapper[4652]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 05 05:26:37 crc kubenswrapper[4652]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 05:26:37 crc kubenswrapper[4652]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
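Every restorecon entry in the run above has the same shape: a path under /var/lib/kubelet, the phrase "not reset as customized by admin to", and the SELinux context the skipped relabel would have applied. When a run like this spans hundreds of entries, a per-context tally is easier to scan than the raw stream. A minimal Python sketch, assuming the journal text has been saved to a file named kubelet.log (the file name, script, and report format are illustrative and not part of this log):

import re
from collections import Counter

# Shape of the restorecon entries above:
#   restorecon[PID]: <path> not reset as customized by admin to <context>
ENTRY = re.compile(
    r"restorecon\[\d+\]: (?P<path>/\S+) not reset as customized by admin to "
    r"(?P<ctx>\S+)"
)

def skipped_by_context(log_path: str) -> Counter:
    """Tally how many skipped relabels target each SELinux context."""
    counts = Counter()
    with open(log_path) as fh:
        for line in fh:
            match = ENTRY.search(line)
            if match:
                counts[match.group("ctx")] += 1
    return counts

if __name__ == "__main__":
    for ctx, count in skipped_by_context("kubelet.log").most_common():
        print(f"{count:6d}  {ctx}")

On this log, most of the volume would land on the s0:c7,c13 category pair shared by the two catalog pods, with the per-container contexts such as s0:c378,c723 trailing far behind.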
Dec 05 05:26:37 crc kubenswrapper[4652]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 05 05:26:37 crc kubenswrapper[4652]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.948999 4652 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951842 4652 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951866 4652 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951871 4652 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951875 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951879 4652 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951883 4652 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951887 4652 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951897 4652 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951901 4652 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951907 4652 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951912 4652 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951917 4652 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951922 4652 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951927 4652 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951932 4652 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951937 4652 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951940 4652 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951943 4652 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951947 4652 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951951 4652 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951954 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951958 4652 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951961 4652 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951965 4652 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951969 4652 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951974 4652 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951978 4652 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951982 4652 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951986 4652 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951989 4652 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951992 4652 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.951996 4652 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952000 4652 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952003 4652 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952007 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952010 4652 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952015 4652 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
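The long runs of feature_gate.go:330 warnings here are not errors: names like GatewayAPI or PinnedImages are OpenShift-level gates that the kubelet's embedded Kubernetes gate table does not know, so the parser warns and moves on, while known gates that are already GA or deprecated produce the feature_gate.go:353/:351 notices instead. A minimal sketch of that validate-and-warn pattern, with a toy three-entry registry standing in for the real table (kubelet's actual implementation lives in k8s.io/component-base/featuregate and is more involved):

// gate_check.go - minimal sketch of the validate-and-warn pattern behind the
// feature_gate.go:330/:351/:353 lines above. The registry below is a toy.
package main

import "fmt"

type spec struct {
	def        bool
	preRelease string // "Alpha", "Beta", "GA", "Deprecated"
}

var known = map[string]spec{
	"CloudDualStackNodeIPs": {def: true, preRelease: "GA"},
	"KMSv1":                 {def: false, preRelease: "Deprecated"},
	"NodeSwap":              {def: false, preRelease: "Beta"},
}

func apply(requested map[string]bool) map[string]bool {
	effective := map[string]bool{}
	for name, s := range known {
		effective[name] = s.def // start from defaults
	}
	for name, val := range requested { // map iteration order varies
		s, ok := known[name]
		if !ok {
			fmt.Printf("W unrecognized feature gate: %s\n", name) // warn, keep going
			continue
		}
		switch s.preRelease {
		case "GA":
			fmt.Printf("W Setting GA feature gate %s=%v. It will be removed in a future release.\n", name, val)
		case "Deprecated":
			fmt.Printf("W Setting deprecated feature gate %s=%v. It will be removed in a future release.\n", name, val)
		}
		effective[name] = val
	}
	return effective
}

func main() {
	fmt.Println(apply(map[string]bool{
		"CloudDualStackNodeIPs": true,  // GA -> warn, still applied
		"KMSv1":                 true,  // deprecated -> warn, still applied
		"GatewayAPI":            false, // unknown to this registry -> warn and skip
	}))
}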
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952019 4652 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952023 4652 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952026 4652 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952029 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952033 4652 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952036 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952040 4652 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952043 4652 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952048 4652 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952052 4652 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952055 4652 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952058 4652 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952062 4652 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952065 4652 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952068 4652 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952072 4652 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952075 4652 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952078 4652 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952082 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952085 4652 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952089 4652 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952093 4652 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952096 4652 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952100 4652 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952104 4652 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952107 4652 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952112 4652 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952116 4652 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952119 4652 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952123 4652 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952126 4652 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952129 4652 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952133 4652 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.952136 4652 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952890 4652 flags.go:64] FLAG: --address="0.0.0.0"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952904 4652 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952912 4652 flags.go:64] FLAG: --anonymous-auth="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952918 4652 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952923 4652 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952927 4652 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952932 4652 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952938 4652 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952942 4652 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952947 4652 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952952 4652 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952956 4652 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952960 4652 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952964 4652 flags.go:64] FLAG: --cgroup-root=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952973 4652 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952977 4652 flags.go:64] FLAG: --client-ca-file=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952980 4652 flags.go:64] FLAG: --cloud-config=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952984 4652 flags.go:64] FLAG: --cloud-provider=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952988 4652 flags.go:64] FLAG: --cluster-dns="[]"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952992 4652 flags.go:64] FLAG: --cluster-domain=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.952996 4652 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953000 4652 flags.go:64] FLAG: --config-dir=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953003 4652 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953007 4652 flags.go:64] FLAG: --container-log-max-files="5"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953011 4652 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953015 4652 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953019 4652 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953023 4652 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953027 4652 flags.go:64] FLAG: --contention-profiling="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953030 4652 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953033 4652 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953037 4652 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953041 4652 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953045 4652 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953049 4652 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953054 4652 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953059 4652 flags.go:64] FLAG: --enable-load-reader="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953064 4652 flags.go:64] FLAG: --enable-server="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953069 4652 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953075 4652 flags.go:64] FLAG: --event-burst="100"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953080 4652 flags.go:64] FLAG: --event-qps="50"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953085 4652 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953089 4652 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953093 4652 flags.go:64] FLAG: --eviction-hard=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953099 4652 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953103 4652 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953108 4652 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953112 4652 flags.go:64] FLAG: --eviction-soft=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953116 4652 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953120 4652 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953124 4652 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953127 4652 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953131 4652 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953135 4652 flags.go:64] FLAG: --fail-swap-on="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953138 4652 flags.go:64] FLAG: --feature-gates=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953143 4652 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953146 4652 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953150 4652 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953154 4652 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953158 4652 flags.go:64] FLAG: --healthz-port="10248"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953162 4652 flags.go:64] FLAG: --help="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953166 4652 flags.go:64] FLAG: --hostname-override=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953169 4652 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953173 4652 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953176 4652 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953180 4652 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953183 4652 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953187 4652 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953190 4652 flags.go:64] FLAG: --image-service-endpoint=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953194 4652 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953197 4652 flags.go:64] FLAG: --kube-api-burst="100"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953201 4652 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953205 4652 flags.go:64] FLAG: --kube-api-qps="50"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953208 4652 flags.go:64] FLAG: --kube-reserved=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953212 4652 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953216 4652 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953220 4652 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953224 4652 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953229 4652 flags.go:64] FLAG: --lock-file=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953234 4652 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953238 4652 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953241 4652 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953247 4652 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953250 4652 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953254 4652 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953257 4652 flags.go:64] FLAG: --logging-format="text"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953261 4652 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953265 4652 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953270 4652 flags.go:64] FLAG: --manifest-url=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953274 4652 flags.go:64] FLAG: --manifest-url-header=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953278 4652 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953282 4652 flags.go:64] FLAG: --max-open-files="1000000"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953286 4652 flags.go:64] FLAG: --max-pods="110"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953290 4652 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953293 4652 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953297 4652 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953300 4652 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953304 4652 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953307 4652 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953311 4652 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953319 4652 flags.go:64] FLAG: --node-status-max-images="50"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953323 4652 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953327 4652 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953330 4652 flags.go:64] FLAG: --pod-cidr=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953334 4652 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953340 4652 flags.go:64] FLAG: --pod-manifest-path=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953344 4652 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953348 4652 flags.go:64] FLAG: --pods-per-core="0"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953351 4652 flags.go:64] FLAG: --port="10250"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953355 4652 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953359 4652 flags.go:64] FLAG: --provider-id=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953365 4652 flags.go:64] FLAG: --qos-reserved=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953369 4652 flags.go:64] FLAG: --read-only-port="10255"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953373 4652 flags.go:64] FLAG: --register-node="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953376 4652 flags.go:64] FLAG: --register-schedulable="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953381 4652 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953388 4652 flags.go:64] FLAG: --registry-burst="10"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953391 4652 flags.go:64] FLAG: --registry-qps="5"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953395 4652 flags.go:64] FLAG: --reserved-cpus=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953398 4652 flags.go:64] FLAG: --reserved-memory=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953408 4652 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953412 4652 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953415 4652 flags.go:64] FLAG: --rotate-certificates="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953419 4652 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953423 4652 flags.go:64] FLAG: --runonce="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953427 4652 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953430 4652 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953434 4652 flags.go:64] FLAG: --seccomp-default="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953437 4652 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953441 4652 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953445 4652 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953448 4652 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953452 4652 flags.go:64] FLAG: --storage-driver-password="root"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953455 4652 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953459 4652 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953463 4652 flags.go:64] FLAG: --storage-driver-user="root"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953466 4652 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953470 4652 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953485 4652 flags.go:64] FLAG: --system-cgroups=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953489 4652 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953495 4652 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953499 4652 flags.go:64] FLAG: --tls-cert-file=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953502 4652 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953508 4652 flags.go:64] FLAG: --tls-min-version=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953511 4652 flags.go:64] FLAG: --tls-private-key-file=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953515 4652 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953519 4652 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953524 4652 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953528 4652 flags.go:64] FLAG: --v="2"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953535 4652 flags.go:64] FLAG: --version="false"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953539 4652 flags.go:64] FLAG: --vmodule=""
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953543 4652 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953548 4652 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953645 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953651 4652 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953655 4652 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953658 4652 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953662 4652 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953665 4652 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953669 4652 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953672 4652 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953675 4652 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953678 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953682 4652 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953685 4652 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953688 4652 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953691 4652 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953694 4652 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953697 4652 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953700 4652 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953703 4652 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953707 4652 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953710 4652 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953713 4652 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953717 4652 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953721 4652 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953725 4652 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953728 4652 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953731 4652 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953737 4652 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
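The flags.go:64] FLAG: block above is the kubelet's full post-parse flag dump, one line per flag with the effective (defaulted or overridden) value in quotes. To recover it as a lookup table, for example to check what --system-reserved ended up as, a small Go sketch under the assumption that values are always double-quoted, which holds for this dump; it splits on the first "=" so values that themselves contain "=" (like cpu=200m,...) survive intact:

// flag_dump.go - recover name/value pairs from the flags.go:64 "FLAG:" lines
// in a kubelet log read on stdin. Sketch only; format assumptions as above.
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	flags := map[string]string{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		_, rest, ok := strings.Cut(sc.Text(), "FLAG: --")
		if !ok {
			continue // not a flag-dump line
		}
		name, val, ok := strings.Cut(rest, "=") // split at the first '=' only
		if !ok {
			continue
		}
		flags[name] = strings.Trim(val, `" `)
	}
	// e.g. the reservations that feed node allocatable:
	fmt.Println("system-reserved:", flags["system-reserved"])
	fmt.Println("kube-reserved:  ", flags["kube-reserved"])
}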
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953742 4652 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953746 4652 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953749 4652 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953753 4652 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953756 4652 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953759 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953763 4652 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953766 4652 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953770 4652 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953773 4652 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953776 4652 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953779 4652 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953782 4652 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953786 4652 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953789 4652 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953792 4652 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953796 4652 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953799 4652 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953803 4652 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953807 4652 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953811 4652 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953815 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953819 4652 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953822 4652 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953826 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953829 4652 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953833 4652 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953838 4652 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953841 4652 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953845 4652 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953849 4652 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953852 4652 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953856 4652 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953859 4652 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953863 4652 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953866 4652 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953869 4652 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953872 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953876 4652 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953880 4652 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953883 4652 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953886 4652 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953890 4652 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.953893 4652 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.953904 4652 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.962240 4652 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.962277 4652 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962381 4652 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962399 4652 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962404 4652 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962410 4652 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
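The feature_gate.go:386] summary above is the merged result of all the warnings: fifteen gates with their effective booleans, printed with Go's default map formatting, and printed identically each time the gate set is re-applied further down. A sketch that turns that one-liner back into a map (the sample string is abbreviated from the log; fmt's space-separated Name:bool layout is the only assumption):

// gate_summary.go - parse a "feature gates: {map[...]}" summary line back
// into a Go map. Sketch; assumes fmt's default map formatting as in this log.
package main

import (
	"fmt"
	"strings"
)

func parse(summary string) map[string]bool {
	inner := strings.TrimSuffix(strings.TrimPrefix(summary, "{map["), "]}")
	gates := map[string]bool{}
	for _, pair := range strings.Fields(inner) { // pairs look like Name:true
		if name, val, ok := strings.Cut(pair, ":"); ok {
			gates[name] = val == "true"
		}
	}
	return gates
}

func main() {
	m := parse("{map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false ValidatingAdmissionPolicy:true]}")
	fmt.Println(len(m), "gates;", "KMSv1 =", m["KMSv1"])
}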
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962419 4652 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962425 4652 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962429 4652 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962433 4652 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962437 4652 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962441 4652 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962446 4652 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962450 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962454 4652 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962458 4652 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962462 4652 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962465 4652 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962469 4652 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962473 4652 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962490 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962493 4652 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962498 4652 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962504 4652 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962509 4652 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962514 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962521 4652 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962526 4652 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962531 4652 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962537 4652 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962543 4652 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962548 4652 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962573 4652 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962579 4652 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962583 4652 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962587 4652 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962591 4652 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962594 4652 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962598 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962602 4652 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962605 4652 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962609 4652 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962613 4652 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962618 4652 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962621 4652 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962627 4652 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962631 4652 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962636 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962641 4652 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962645 4652 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962650 4652 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962655 4652 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962659 4652 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962664 4652 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962667 4652 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962672 4652 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962675 4652 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962679 4652 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962683 4652 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962688 4652 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962691 4652 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962696 4652 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962700 4652 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962703 4652 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962706 4652 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962709 4652 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962714 4652 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962719 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962722 4652 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962726 4652 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962729 4652 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962733 4652 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962736 4652 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.962743 4652 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962867 4652 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962875 4652 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962880 4652 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962883 4652 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962888 4652 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962891 4652 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962894 4652 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962897 4652 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962902 4652 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962906 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962910 4652 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962913 4652 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962917 4652 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962921 4652 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962925 4652 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962930 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962934 4652 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962938 4652 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962942 4652 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962946 4652 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962951 4652 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962958 4652 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962962 4652 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962966 4652 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962969 4652 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962973 4652 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962976 4652 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962979 4652 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962982 4652 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962985 4652 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962988 4652 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962992 4652 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962995 4652 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.962998 4652 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963001 4652 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963004 4652 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963007 4652 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963012 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963016 4652 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963019 4652 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963022 4652 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963025 4652 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963028 4652 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963032 4652 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963036 4652 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963040 4652 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963043 4652 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963046 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963050 4652 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963053 4652 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963057 4652 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963060 4652 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963064 4652 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963067 4652 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963070 4652 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963073 4652 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963077 4652 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963080 4652 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963083 4652 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963086 4652 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963089 4652 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963092 4652 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963095 4652 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963099 4652 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963102 4652 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963105 4652 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963109 4652 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963113 4652 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963117 4652 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963121 4652 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 05:26:37 crc kubenswrapper[4652]: W1205 05:26:37.963124 4652 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.963130 4652 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.963316 4652 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.966198 4652 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.966283 4652 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.968098 4652 server.go:997] "Starting client certificate rotation"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.968128 4652 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.968305 4652 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-02 02:46:54.899215093 +0000 UTC
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.968432 4652 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 669h20m16.930786572s for next certificate rotation
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.980611 4652 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.982050 4652 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 05:26:37 crc kubenswrapper[4652]: I1205 05:26:37.992138 4652 log.go:25] "Validated CRI v1 runtime API"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.009498 4652 log.go:25] "Validated CRI v1 image API"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.012299 4652 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.016240 4652 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-05-23-06-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
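The two certificate_manager.go:356 entries above encode simple arithmetic: the client certificate expires 2026-02-24 05:52:08 UTC, the manager picked an earlier, jittered rotation deadline of 2026-01-02 02:46:54 UTC (rotation deliberately happens at a randomized fraction of the certificate's lifetime, well before expiry), and the advertised 669h20m wait is just deadline minus the current time. A Go sketch that reproduces the numbers; the layout string is an assumption matching the log's timestamp format, and error handling is elided for brevity:

// rotation_wait.go - reproduce the "Waiting 669h20m..." arithmetic from the
// certificate_manager.go:356 lines above: wait = rotation deadline - now.
package main

import (
	"fmt"
	"time"
)

func main() {
	// time.Parse accepts fractional seconds even when the layout omits them.
	const layout = "2006-01-02 15:04:05 -0700 MST"
	now, _ := time.Parse(layout, "2025-12-05 05:26:37.968432 +0000 UTC")    // klog timestamp of the Waiting entry
	deadline, _ := time.Parse(layout, "2026-01-02 02:46:54.899215093 +0000 UTC")
	expiry, _ := time.Parse(layout, "2026-02-24 05:52:08 +0000 UTC")

	fmt.Println("wait until rotation:", deadline.Sub(now)) // ~669h20m16.9s, matching the log to within microseconds
	fmt.Println("slack before expiry:", expiry.Sub(deadline))
}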
kubenswrapper[4652]: I1205 05:26:38.016272 4652 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}] Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.044198 4652 manager.go:217] Machine: {Timestamp:2025-12-05 05:26:38.032789334 +0000 UTC m=+0.269519601 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445404 MemoryCapacity:33654116352 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:dd778907-0455-45bb-b295-f8f78dcf8791 BootID:b4a8999b-e59d-4947-b2ae-b94914acb85b Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827056128 Type:vfs Inodes:4108168 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108168 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108168 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:8d:a4:5e Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:8d:a4:5e Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:ed:27:18 Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:1f:72:79 Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:31:d9:96 Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:55:0a:0a Speed:-1 Mtu:1436} {Name:eth10 MacAddress:e6:7f:6d:16:66:81 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:0a:d4:78:82:7e:9e Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654116352 HugePages:[{PageSize:1048576 
NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.044441 4652 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.044687 4652 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.055312 4652 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.055865 4652 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.055947 4652 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.056432 4652 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.056443 4652 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.056803 4652 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.056861 4652 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.065004 4652 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.065100 4652 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.066469 4652 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.066503 4652 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.066527 4652 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.066542 4652 kubelet.go:324] "Adding apiserver pod source"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.066569 4652 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.068660 4652 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.069253 4652 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.070615 4652 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.070701 4652 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.070755 4652 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.93:6443: connect: connection refused" logger="UnhandledError"
Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.070748 4652 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused
Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.070833 4652 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.93:6443: connect: connection refused" logger="UnhandledError"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071798 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071829 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071839 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071848 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071864 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071871 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071877 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071887 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071899 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071913 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071923 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.071930 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.072444 4652 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.072933 4652 server.go:1280] "Started kubelet"
Dec 05 05:26:38 crc systemd[1]: Started Kubernetes Kubelet.
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.074391 4652 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.074700 4652 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.074585 4652 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.075068 4652 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.076860 4652 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.076893 4652 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.077019 4652 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 23:16:07.334610906 +0000 UTC
Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.077270 4652 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.077284 4652 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.079358 4652 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.078804 4652 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.079742 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="200ms"
Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.080219 4652 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused
Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.080345 4652 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.93:6443: connect: connection refused" logger="UnhandledError"
Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.079904 4652 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.25.93:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e3a79a2d18ccd default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:26:38.072909005 +0000 UTC m=+0.309639272,LastTimestamp:2025-12-05 05:26:38.072909005 +0000 UTC m=+0.309639272,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.081301 4652 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.082644 4652 factory.go:55] Registering systemd factory
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.082694 4652 factory.go:221] Registration of the systemd container factory successfully
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.083082 4652 factory.go:153] Registering CRI-O factory
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.083098 4652 factory.go:221] Registration of the crio container factory successfully
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.083797 4652 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.083854 4652 factory.go:103] Registering Raw factory
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.083881 4652 manager.go:1196] Started watching for new ooms in manager
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.084511 4652 manager.go:319] Starting recovery of all containers
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095090 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095169 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095183 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095218 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095230 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095242 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095253 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095268 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095280 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095289 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095303 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095314 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095326 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095344 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095355 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095366 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095381 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095392 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095402 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095412 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095421 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095433 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095447 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095458 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095470 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.095493 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096549 4652 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096615 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096633 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096647 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096658 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096673 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096688 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096701 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096725 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096739 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096752 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096770 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096783 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096796 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096810 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096822 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096835 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096847 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096899 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096915 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096929 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096942 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096957 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096971 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096985 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.096998 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097010 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097027 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097039 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097051 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097066 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097080 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097095 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097108 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097122 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097134 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097148 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097161 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097174 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097191 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097204 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097217 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097232 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097249 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097263 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097276 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097287 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097299 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097310 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097323 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097336 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097347 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097363 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097375 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097388 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097403 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097416 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097428 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097441 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097454 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097468 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097489 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097503 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097514 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097525 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097538 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097549 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097577 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097588 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097599 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097609 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097623 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097636 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097650 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097663 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097677 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097691 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097705 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097716 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097737 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097753 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097768 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097782 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097797 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097810 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097822 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097857 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097869 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097881 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097893 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097908 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097922 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097959 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097970 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097982 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.097991 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098004 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098017 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098029 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098040 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098051 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098063 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098078 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098092 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098103 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098117 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098130 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098146 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098159 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098174 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098186 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098199 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098211 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098225 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098237 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098248 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098259 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098269 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098283 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098297 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098309 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd"
volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098322 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098334 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098347 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098359 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098371 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098384 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098398 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098408 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098419 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098430 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098440 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098454 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098466 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098489 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098502 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098515 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098527 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098542 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098568 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098583 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098594 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098608 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098621 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098636 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098646 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098659 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098671 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098684 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098697 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098711 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098723 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098734 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098747 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" 
volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098757 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098768 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098778 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098789 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098802 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098814 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098826 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098837 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098848 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098860 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098872 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098885 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098899 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098912 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098924 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098937 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098949 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098968 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098981 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.098994 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099010 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099020 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099032 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099047 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099065 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099078 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099090 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099100 4652 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099112 4652 reconstruct.go:97] "Volume reconstruction finished" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.099121 4652 reconciler.go:26] "Reconciler: start to sync state" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.105273 4652 manager.go:324] Recovery completed Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.118863 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.120543 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.120612 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.120629 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.121590 4652 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.121655 4652 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.121714 4652 state_mem.go:36] "Initialized new in-memory state store" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.122324 4652 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv4" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.122971 4652 policy_none.go:49] "None policy: Start" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.123844 4652 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.123875 4652 state_mem.go:35] "Initializing new in-memory state store" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.124382 4652 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.124422 4652 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.124452 4652 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.124502 4652 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.125050 4652 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.125090 4652 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.93:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.177358 4652 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.179197 4652 manager.go:334] "Starting Device Plugin manager" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.179268 4652 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.179286 4652 server.go:79] "Starting device plugin registration server" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.179698 4652 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.179719 4652 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.179992 4652 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.180085 4652 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.180100 4652 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.189348 4652 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.224879 4652 kubelet.go:2421] "SyncLoop ADD" source="file" 
pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.224968 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.225926 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.225964 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.225977 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.226124 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.226598 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.226647 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227107 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227145 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227157 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227313 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227499 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227524 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227534 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227591 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.227658 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228232 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228260 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228271 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228373 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228548 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228607 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228948 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228972 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.228983 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229027 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229042 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229051 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229090 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229243 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229270 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229396 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229463 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229612 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229691 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229834 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.229854 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.230023 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.230097 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.230160 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.230124 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.230275 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.230916 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.230937 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.230968 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.279928 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.280382 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="400ms" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.280615 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.280648 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.280659 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.280681 4652 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.281025 4652 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.93:6443: connect: connection refused" node="crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300597 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300635 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300667 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300691 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300712 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300735 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300756 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300783 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300803 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300822 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300842 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300862 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300878 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" 
(UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300899 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.300916 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402188 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402291 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402373 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402464 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402377 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402461 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402306 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402509 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402575 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402709 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402736 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402761 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402780 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402803 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402785 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402821 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402760 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402842 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402867 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402888 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402910 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402916 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402938 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402953 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402932 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402957 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402990 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" 
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.402971 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.403126 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.403184 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.481812 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.482926 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.482979 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.482990 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.483010 4652 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.483417 4652 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.93:6443: connect: connection refused" node="crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.565116 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.586353 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-266ecd11e4042af0b232e1f688d986a2506bbb212a73625848099c1ce6526438 WatchSource:0}: Error finding container 266ecd11e4042af0b232e1f688d986a2506bbb212a73625848099c1ce6526438: Status 404 returned error can't find the container with id 266ecd11e4042af0b232e1f688d986a2506bbb212a73625848099c1ce6526438
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.589616 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.596362 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.607588 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-c7cd1141752b9fc8e2585ac7ea73ea2d4752de460741457c0ebd8e8c3d86f763 WatchSource:0}: Error finding container c7cd1141752b9fc8e2585ac7ea73ea2d4752de460741457c0ebd8e8c3d86f763: Status 404 returned error can't find the container with id c7cd1141752b9fc8e2585ac7ea73ea2d4752de460741457c0ebd8e8c3d86f763
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.610612 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.611358 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-33883d0cc43a4dd333c952bd4c9651314acf9e4164e4652a1415698906d02f22 WatchSource:0}: Error finding container 33883d0cc43a4dd333c952bd4c9651314acf9e4164e4652a1415698906d02f22: Status 404 returned error can't find the container with id 33883d0cc43a4dd333c952bd4c9651314acf9e4164e4652a1415698906d02f22
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.615945 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.620957 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-9471e194521ce77b7b2d7b70529985141a8eab8d09ff1e6c0b67cb18a63b91c9 WatchSource:0}: Error finding container 9471e194521ce77b7b2d7b70529985141a8eab8d09ff1e6c0b67cb18a63b91c9: Status 404 returned error can't find the container with id 9471e194521ce77b7b2d7b70529985141a8eab8d09ff1e6c0b67cb18a63b91c9
Dec 05 05:26:38 crc kubenswrapper[4652]: W1205 05:26:38.632975 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-2843643671b07f40c9093d18cd0fdff36b55b9da740ba824057ec86192c9ce55 WatchSource:0}: Error finding container 2843643671b07f40c9093d18cd0fdff36b55b9da740ba824057ec86192c9ce55: Status 404 returned error can't find the container with id 2843643671b07f40c9093d18cd0fdff36b55b9da740ba824057ec86192c9ce55
Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.681722 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="800ms"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.883905 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.885492 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.885531 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:38 crc kubenswrapper[4652]: I1205 05:26:38.885601 4652 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:26:38 crc kubenswrapper[4652]: E1205 05:26:38.885971 4652 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.93:6443: connect: connection refused" node="crc" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.076341 4652 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.077334 4652 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 01:39:59.060397959 +0000 UTC Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.077417 4652 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 692h13m19.98298321s for next certificate rotation Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.130069 4652 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8b711342d8a0c8caaab4b258f65556cdca66596850243c86d0fef32a6b63b3a7" exitCode=0 Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.130174 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8b711342d8a0c8caaab4b258f65556cdca66596850243c86d0fef32a6b63b3a7"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.130337 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c7cd1141752b9fc8e2585ac7ea73ea2d4752de460741457c0ebd8e8c3d86f763"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.130513 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.131436 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.131497 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.131519 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.131771 4652 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9" exitCode=0 Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.131848 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.131882 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"266ecd11e4042af0b232e1f688d986a2506bbb212a73625848099c1ce6526438"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.131962 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.133230 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.133281 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.133295 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.135491 4652 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8" exitCode=0 Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.135690 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.135749 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2843643671b07f40c9093d18cd0fdff36b55b9da740ba824057ec86192c9ce55"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.136090 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.138709 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.138764 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.138782 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.140169 4652 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f" exitCode=0 Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.140261 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.140337 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9471e194521ce77b7b2d7b70529985141a8eab8d09ff1e6c0b67cb18a63b91c9"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.140461 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.141288 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.141326 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.141339 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.142902 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.142952 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"33883d0cc43a4dd333c952bd4c9651314acf9e4164e4652a1415698906d02f22"} Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.143105 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.143813 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.143845 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.143857 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:39 crc kubenswrapper[4652]: W1205 05:26:39.238981 4652 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused Dec 05 05:26:39 crc kubenswrapper[4652]: E1205 05:26:39.239087 4652 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.93:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:26:39 crc kubenswrapper[4652]: W1205 05:26:39.369112 4652 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused Dec 05 05:26:39 crc kubenswrapper[4652]: E1205 05:26:39.369196 4652 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.93:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:26:39 crc kubenswrapper[4652]: E1205 05:26:39.483730 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="1.6s" Dec 05 
05:26:39 crc kubenswrapper[4652]: W1205 05:26:39.502906 4652 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused Dec 05 05:26:39 crc kubenswrapper[4652]: E1205 05:26:39.502980 4652 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.93:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:26:39 crc kubenswrapper[4652]: E1205 05:26:39.571017 4652 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.25.93:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e3a79a2d18ccd default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:26:38.072909005 +0000 UTC m=+0.309639272,LastTimestamp:2025-12-05 05:26:38.072909005 +0000 UTC m=+0.309639272,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 05:26:39 crc kubenswrapper[4652]: W1205 05:26:39.647076 4652 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.93:6443: connect: connection refused Dec 05 05:26:39 crc kubenswrapper[4652]: E1205 05:26:39.647160 4652 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.93:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.687089 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.689500 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.689542 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.689576 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:39 crc kubenswrapper[4652]: I1205 05:26:39.689608 4652 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:26:39 crc kubenswrapper[4652]: E1205 05:26:39.690081 4652 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.93:6443: connect: connection refused" node="crc" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.147752 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.147808 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.147823 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.147924 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.148759 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.148814 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.148828 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.152396 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.152443 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.152458 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.152471 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.152482 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.152628 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.153472 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.153502 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.153516 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.156532 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.156588 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.156601 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.156671 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.157506 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.157531 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.157540 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.159161 4652 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="643f95fcaa113fd2571e096776f7bf5c534c8b3a34db197756e4ad00fcd4dede" exitCode=0 Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.159213 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"643f95fcaa113fd2571e096776f7bf5c534c8b3a34db197756e4ad00fcd4dede"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.159323 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.160128 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.160154 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.160162 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.162111 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406"} Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.162227 4652 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.163026 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.163066 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.163080 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.873812 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:40 crc kubenswrapper[4652]: I1205 05:26:40.991771 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.165987 4652 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7856794cb30b5a3e4355dd556b4387cd08df3273781a9fc8f921e93f8136d591" exitCode=0 Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.166187 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.166621 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7856794cb30b5a3e4355dd556b4387cd08df3273781a9fc8f921e93f8136d591"} Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.166749 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.167155 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.167651 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.167674 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.167684 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.167648 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.167722 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.167735 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.168211 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.168231 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.168248 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.290675 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.291516 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.291546 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.291570 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:41 crc kubenswrapper[4652]: I1205 05:26:41.291592 4652 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.173617 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a6ab346b85ba625d8d46ccf60724ea6dd91208bbcf3edb582360e722e172449c"} Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.173701 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4a4e3e5106004ec9cca94a3b52a349f2b07a8a90e724256c7ca9f652997a476d"} Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.173715 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4776f5dfff3fd4208677e9ae7bf86962a44a941a0656585c7bf93a122856ea76"} Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.173725 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"18094fe3c8196168516b228489dcec30f629313cd0752d54dbc34339da071dda"} Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.173738 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e6e09c5ae692576e22f59c23d54c7e6fb6af3fc5d52c03ef9cae0b7ed9fae8f8"} Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.173740 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.173910 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.174765 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.174798 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.174808 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.174820 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.174843 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:42 crc 
Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.174856 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.921373 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 05 05:26:42 crc kubenswrapper[4652]: I1205 05:26:42.965323 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.091850 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.092007 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.093138 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.093183 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.093198 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.176634 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.176688 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.177665 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.177705 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.177717 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.177841 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.177887 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.177897 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.723588 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.723723 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.724866 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.724898 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.724908 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.731696 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.758394 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.758630 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.759529 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.759582 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:26:43 crc kubenswrapper[4652]: I1205 05:26:43.759594 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:44 crc kubenswrapper[4652]: I1205 05:26:44.179362 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:44 crc kubenswrapper[4652]: I1205 05:26:44.180318 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:44 crc kubenswrapper[4652]: I1205 05:26:44.180354 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:26:44 crc kubenswrapper[4652]: I1205 05:26:44.180366 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:45 crc kubenswrapper[4652]: I1205 05:26:45.751317 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 05:26:45 crc kubenswrapper[4652]: I1205 05:26:45.751661 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:45 crc kubenswrapper[4652]: I1205 05:26:45.752751 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:45 crc kubenswrapper[4652]: I1205 05:26:45.752781 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:26:45 crc kubenswrapper[4652]: I1205 05:26:45.752791 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:26:47 crc kubenswrapper[4652]: I1205 05:26:47.128437 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 05 05:26:47 crc kubenswrapper[4652]: I1205 05:26:47.128650 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:26:47 crc kubenswrapper[4652]: I1205 05:26:47.129720 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:26:47 crc kubenswrapper[4652]: I1205 05:26:47.129760 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
node="crc" event="NodeHasSufficientPID" Dec 05 05:26:48 crc kubenswrapper[4652]: E1205 05:26:48.190077 4652 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 05:26:48 crc kubenswrapper[4652]: I1205 05:26:48.752192 4652 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 05:26:48 crc kubenswrapper[4652]: I1205 05:26:48.752314 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.344720 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.344894 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.345966 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.346012 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.346024 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.348079 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.963913 4652 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.964003 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.968355 4652 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 05:26:49 crc kubenswrapper[4652]: I1205 05:26:49.968421 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 05:26:50 crc kubenswrapper[4652]: I1205 05:26:50.202096 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:50 crc kubenswrapper[4652]: I1205 05:26:50.202752 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:50 crc kubenswrapper[4652]: I1205 05:26:50.202778 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:50 crc kubenswrapper[4652]: I1205 05:26:50.202812 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:50 crc kubenswrapper[4652]: I1205 05:26:50.992654 4652 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 05:26:50 crc kubenswrapper[4652]: I1205 05:26:50.992727 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.943418 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.943674 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.944809 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.944922 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.945020 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.953612 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.970740 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.970903 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.971346 4652 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.971431 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.971885 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.971923 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.971934 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:52 crc kubenswrapper[4652]: I1205 05:26:52.974508 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.209772 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.209965 4652 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.210000 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.209778 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.210995 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.211095 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.211159 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.211679 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.211697 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:53 crc kubenswrapper[4652]: I1205 05:26:53.211707 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:54 crc kubenswrapper[4652]: E1205 05:26:54.958231 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 05 05:26:54 crc kubenswrapper[4652]: I1205 05:26:54.960534 4652 trace.go:236] Trace[359325496]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 05:26:42.289) (total time: 12671ms): Dec 05 05:26:54 crc 
kubenswrapper[4652]: Trace[359325496]: ---"Objects listed" error: 12671ms (05:26:54.960) Dec 05 05:26:54 crc kubenswrapper[4652]: Trace[359325496]: [12.671150788s] [12.671150788s] END Dec 05 05:26:54 crc kubenswrapper[4652]: I1205 05:26:54.960575 4652 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 05:26:54 crc kubenswrapper[4652]: I1205 05:26:54.960679 4652 trace.go:236] Trace[409086455]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 05:26:41.895) (total time: 13064ms): Dec 05 05:26:54 crc kubenswrapper[4652]: Trace[409086455]: ---"Objects listed" error: 13064ms (05:26:54.960) Dec 05 05:26:54 crc kubenswrapper[4652]: Trace[409086455]: [13.064788517s] [13.064788517s] END Dec 05 05:26:54 crc kubenswrapper[4652]: I1205 05:26:54.960726 4652 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 05:26:54 crc kubenswrapper[4652]: I1205 05:26:54.961349 4652 trace.go:236] Trace[741903735]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 05:26:41.104) (total time: 13856ms): Dec 05 05:26:54 crc kubenswrapper[4652]: Trace[741903735]: ---"Objects listed" error: 13856ms (05:26:54.961) Dec 05 05:26:54 crc kubenswrapper[4652]: Trace[741903735]: [13.856835309s] [13.856835309s] END Dec 05 05:26:54 crc kubenswrapper[4652]: I1205 05:26:54.961369 4652 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 05:26:54 crc kubenswrapper[4652]: I1205 05:26:54.961818 4652 trace.go:236] Trace[409949836]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 05:26:42.122) (total time: 12839ms): Dec 05 05:26:54 crc kubenswrapper[4652]: Trace[409949836]: ---"Objects listed" error: 12839ms (05:26:54.961) Dec 05 05:26:54 crc kubenswrapper[4652]: Trace[409949836]: [12.839138453s] [12.839138453s] END Dec 05 05:26:54 crc kubenswrapper[4652]: I1205 05:26:54.961845 4652 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 05:26:54 crc kubenswrapper[4652]: E1205 05:26:54.963472 4652 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.026677 4652 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.073792 4652 apiserver.go:52] "Watching apiserver" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.077215 4652 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.077540 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.077946 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.078141 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.078280 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.078318 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.078332 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.078354 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.078364 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.078820 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.078976 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.080913 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.081034 4652 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.081035 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.080919 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.081087 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.081110 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.081141 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.081165 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.081191 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.081265 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.110463 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.127262 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.127524 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.127650 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.127727 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.127803 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.127900 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.127991 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128058 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128122 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128210 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128278 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128345 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128415 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128271 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128304 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128310 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128310 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128547 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128650 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128693 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128709 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128725 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128763 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128778 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128793 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128808 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128825 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128844 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128858 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128872 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128889 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128917 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128946 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128960 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128985 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129000 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 
05:26:55.129016 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129029 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129045 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129059 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129178 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129206 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129229 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129287 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129310 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129367 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 05:26:55 crc 
kubenswrapper[4652]: I1205 05:26:55.129401 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129441 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129455 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129481 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129495 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129508 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129531 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129545 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129597 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129611 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod 
\"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128705 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128867 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.128901 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129039 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129308 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129462 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.129509 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130078 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130097 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130188 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130283 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130297 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130302 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130464 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130582 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130758 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130851 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.130950 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.131315 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.132081 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.135325 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.136429 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.136712 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.136846 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.136916 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.137215 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.137473 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.137927 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.138075 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.142433 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.142895 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143061 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143105 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143327 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143474 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143547 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143682 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143734 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143767 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143790 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143814 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143833 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143852 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143872 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143893 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143913 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143929 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: 
\"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143947 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143966 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143978 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.143986 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144045 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144069 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144092 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144110 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144128 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144158 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144175 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144196 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144213 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144235 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144251 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144270 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144262 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144288 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144305 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144332 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144364 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144380 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144399 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144417 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144418 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144436 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144455 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144474 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144484 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144492 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144503 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144570 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144601 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144624 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144627 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144644 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144664 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144684 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144700 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144719 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144741 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: 
\"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144762 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144784 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144809 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144831 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144848 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144867 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144885 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144912 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144931 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144957 4652 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144973 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145063 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145225 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145247 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145270 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145290 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145307 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145334 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145351 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:26:55 crc 
kubenswrapper[4652]: I1205 05:26:55.145373 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145392 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145412 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145428 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145449 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145466 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145492 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145508 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145530 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145572 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145589 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145612 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145631 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145655 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145671 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145947 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145970 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145988 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146003 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146023 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod 
\"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146041 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146059 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146078 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146099 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146117 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146146 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146165 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146185 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146201 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146218 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: 
\"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146236 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146252 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146269 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146288 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146304 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146325 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146343 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146362 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146378 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146396 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146415 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146431 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146452 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146475 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146492 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146510 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146528 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146546 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146580 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146599 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146617 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146635 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146652 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146671 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146689 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146704 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147145 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147194 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147230 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147255 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147278 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147299 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147322 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147339 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147361 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147383 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147406 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147461 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147498 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147519 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147540 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147578 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147597 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147625 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147649 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147672 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147690 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147709 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147728 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147747 4652 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147805 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147835 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147960 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.148003 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.148031 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.148244 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.148277 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.148310 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.148488 4652 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.149467 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.150695 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144642 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144691 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144732 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144833 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144866 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144890 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.144999 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145298 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145369 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145404 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.145590 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.146504 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147020 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147211 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147570 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147361 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147631 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147782 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147873 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.147839 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.148263 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.148155 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.148347 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:26:55.648324877 +0000 UTC m=+17.885055144 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.153986 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.150646 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.150728 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.150818 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.151047 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.151123 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.151431 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.151892 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.152040 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.152593 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.152631 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.152846 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.152916 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.153613 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.153718 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.153784 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.153813 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.154142 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.154203 4652 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.154328 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.154444 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.154707 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.155114 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.173675 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:55.654380812 +0000 UTC m=+17.891111078 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.173840 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.173863 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.173980 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.174591 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.174225 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.174360 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.174900 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.174944 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.174946 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.174939 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.174972 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175003 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175051 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.175105 4652 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175146 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.175274 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:55.675263649 +0000 UTC m=+17.911993917 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175293 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175314 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175394 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175414 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175426 4652 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175436 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175446 4652 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175456 4652 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175466 4652 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175475 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175484 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175494 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175503 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175512 4652 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175521 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc 
kubenswrapper[4652]: I1205 05:26:55.175530 4652 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175529 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175538 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175587 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175612 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175597 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175664 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175680 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175694 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175705 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175722 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 
05:26:55.175734 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175745 4652 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175756 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175766 4652 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175778 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175788 4652 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175799 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175810 4652 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175819 4652 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175829 4652 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175843 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175861 4652 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175872 4652 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175883 
4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175895 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175906 4652 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175915 4652 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175925 4652 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175935 4652 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175946 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175956 4652 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175966 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175976 4652 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175989 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176000 4652 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176010 4652 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: 
I1205 05:26:55.176019 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176028 4652 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176038 4652 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176049 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176059 4652 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176068 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176078 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176087 4652 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176096 4652 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176105 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176114 4652 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176123 4652 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176142 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176153 4652 reconciler_common.go:293] 
"Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176163 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176172 4652 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176180 4652 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176189 4652 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176200 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176209 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176218 4652 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176228 4652 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176238 4652 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176247 4652 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176256 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176268 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175629 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175626 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175631 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175677 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175796 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175828 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175975 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.175991 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176271 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176419 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176510 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176627 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176666 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176803 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.176964 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.177327 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.177436 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.177745 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.178026 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.178124 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.178614 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.178765 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.179156 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.179474 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.180205 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.180279 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.180403 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.180487 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.181513 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.181749 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.181964 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.182210 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.182327 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.182532 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.183054 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.188016 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.188041 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.188054 4652 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.188100 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:55.68808942 +0000 UTC m=+17.924819687 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.189271 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.190832 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.190989 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.192126 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.192897 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.194470 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.194675 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.195100 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.195709 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.196341 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.197031 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.197382 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.197397 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.197408 4652 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.197458 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:55.697446611 +0000 UTC m=+17.934176877 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.197728 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.198029 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.199283 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.199756 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.200959 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.200922 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.201124 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.201211 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.199030 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.201390 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.201836 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.201838 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.201857 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202054 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202226 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202397 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202443 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202193 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202520 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202684 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202935 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.202988 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.203266 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.203268 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.203414 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.203523 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.204457 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.204760 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.204987 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205016 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205246 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205211 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205408 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205546 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205672 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205649 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205793 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205874 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.205963 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206021 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206063 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206106 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206254 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206267 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206388 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206416 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206443 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206615 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206668 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206680 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.206734 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.212395 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.225747 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.229397 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.234893 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.238991 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.242028 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277179 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277283 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277367 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277384 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277394 4652 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277405 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277418 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277430 4652 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277440 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" 
(UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277450 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277460 4652 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277467 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277476 4652 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277485 4652 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277502 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277510 4652 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277519 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277527 4652 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277536 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277544 4652 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277590 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277601 4652 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" 
(UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277611 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277626 4652 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277638 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277648 4652 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277656 4652 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277667 4652 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277678 4652 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277689 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277700 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277709 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277720 4652 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277730 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277740 4652 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277749 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277758 4652 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277767 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277777 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277787 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277798 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277806 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277815 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277824 4652 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277833 4652 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277842 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277851 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277860 4652 reconciler_common.go:293] "Volume 
detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277869 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277878 4652 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277887 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277898 4652 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277906 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277917 4652 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277927 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277937 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277947 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277963 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277973 4652 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277982 4652 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277991 4652 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.277999 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278008 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278017 4652 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278026 4652 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278034 4652 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278045 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278055 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278066 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278076 4652 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278085 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278094 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278105 4652 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278086 4652 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278115 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278207 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278223 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278242 4652 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278253 4652 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278268 4652 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278294 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278305 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278316 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278326 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278340 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278350 4652 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: 
I1205 05:26:55.278360 4652 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278371 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278381 4652 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278391 4652 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278400 4652 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278410 4652 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278420 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278429 4652 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278439 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278449 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278459 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278468 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278479 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278490 4652 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278500 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278510 4652 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278518 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278527 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278536 4652 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278545 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278568 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278578 4652 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278587 4652 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278596 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278607 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278617 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 
05:26:55.278627 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278636 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278645 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278654 4652 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278665 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278674 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278684 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278692 4652 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278701 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278709 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278718 4652 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278727 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.278167 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod 
\"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.394016 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.402693 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.407932 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:26:55 crc kubenswrapper[4652]: W1205 05:26:55.422547 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-4e2c5b7e71a58d8aed6efcc233568355d57415aaf5dc442eda3d96171d64fc43 WatchSource:0}: Error finding container 4e2c5b7e71a58d8aed6efcc233568355d57415aaf5dc442eda3d96171d64fc43: Status 404 returned error can't find the container with id 4e2c5b7e71a58d8aed6efcc233568355d57415aaf5dc442eda3d96171d64fc43 Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.685236 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.685318 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.685351 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.685415 4652 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.685417 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:26:56.685393091 +0000 UTC m=+18.922123368 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.685488 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:56.685460658 +0000 UTC m=+18.922190925 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.685582 4652 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.685749 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:56.685725375 +0000 UTC m=+18.922455652 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.755082 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.758502 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.764450 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.765912 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.775434 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.786084 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.786243 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.786303 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.786520 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.786659 4652 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.786650 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.786403 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.786867 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.786883 4652 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.786926 4652 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:56.786915653 +0000 UTC m=+19.023645920 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:55 crc kubenswrapper[4652]: E1205 05:26:55.787044 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:56.787023375 +0000 UTC m=+19.023753642 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.795075 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.805909 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.818446 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.830872 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.851066 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.861201 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.870987 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.880364 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.889925 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:55 crc kubenswrapper[4652]: I1205 05:26:55.898228 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.129840 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.130510 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.131984 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.132753 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.133871 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.134486 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.135059 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.135961 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.136535 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.137399 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.137908 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.138895 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.139367 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.139837 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.140674 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.141166 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.142053 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.142436 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.143059 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.143960 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.144455 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.145338 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.145805 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.146738 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" 
path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.147122 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.147782 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.148713 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.149151 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.149934 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.150323 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.151106 4652 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.151212 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.152672 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.153445 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.153825 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.155298 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.155921 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.156738 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" 
path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.157340 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.158275 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.158736 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.159611 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.160170 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.161863 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.162534 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.163278 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.164326 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.165348 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.165791 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.166235 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.166682 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.167155 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.167690 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.168115 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.216294 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba"} Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.216334 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d67eedb5bb314b219c60bd53fb86107922cf5ab4b1f2179b3723c81d7030babd"} Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.218031 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.219689 4652 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c" exitCode=255 Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.219763 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c"} Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.220674 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4e2c5b7e71a58d8aed6efcc233568355d57415aaf5dc442eda3d96171d64fc43"} Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.222225 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352"} Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.222288 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c"} Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.222301 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"483781dbc6f21542a9ebfe34a98c2b274444827cb86d52edfd4d2ce088f426d9"} Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.227981 4652 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298
bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.228702 4652 scope.go:117] "RemoveContainer" containerID="ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.229321 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.239965 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.250635 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.259807 4652 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.275414 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.286537 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.297104 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.306884 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.328713 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.339401 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.354192 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.370849 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.386231 4652 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.397734 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.410123 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.518033 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.693836 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.694021 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.694123 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 05:26:58.69407994 +0000 UTC m=+20.930810217 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.694209 4652 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.694237 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.694312 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:58.694283002 +0000 UTC m=+20.931013279 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.694383 4652 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.694434 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:58.694422906 +0000 UTC m=+20.931153173 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.795508 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:26:56 crc kubenswrapper[4652]: I1205 05:26:56.795595 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.795804 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.795854 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.795869 4652 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.795954 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:58.795935549 +0000 UTC m=+21.032665816 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.796079 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.796190 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.796269 4652 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:56 crc kubenswrapper[4652]: E1205 05:26:56.796395 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:26:58.796376337 +0000 UTC m=+21.033106614 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.125338 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.125380 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.125474 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:26:57 crc kubenswrapper[4652]: E1205 05:26:57.125523 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:26:57 crc kubenswrapper[4652]: E1205 05:26:57.125695 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:26:57 crc kubenswrapper[4652]: E1205 05:26:57.125901 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.226480 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.228797 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7"} Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.229139 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.239851 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:57Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.250600 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:57Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.262728 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:57Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.272065 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:57Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.281728 4652 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:57Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.290840 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:57Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.299321 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:57Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:57 crc kubenswrapper[4652]: I1205 05:26:57.308087 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:57Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.137530 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.147457 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.157305 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.164536 4652 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.166042 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.166091 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.166104 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.166186 4652 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.166910 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.172226 4652 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.172445 4652 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.173328 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.173352 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.173360 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.173373 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.173382 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.177168 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.196170 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.196479 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.199649 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.199683 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.199692 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.199705 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" 
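Two failures repeat throughout the records above: every status patch is rejected because the serving certificate behind the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, and the node stays NotReady because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. The following standalone Go sketch is a hypothetical diagnostic, not part of the kubelet or of this log; the only values it reuses are the webhook address and the CNI directory taken from the records above. Run on the affected node, it confirms both conditions directly.

    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"os"
    	"time"
    )

    func main() {
    	// Dial the webhook endpoint named in the "failed calling webhook" records.
    	// InsecureSkipVerify lets the TLS handshake complete so the expired
    	// certificate can be inspected instead of rejected outright.
    	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
    	if err != nil {
    		fmt.Fprintf(os.Stderr, "webhook dial failed: %v\n", err)
    	} else {
    		defer conn.Close()
    		if certs := conn.ConnectionState().PeerCertificates; len(certs) > 0 {
    			c := certs[0]
    			fmt.Printf("webhook cert subject=%s notAfter=%s expired=%v\n",
    				c.Subject, c.NotAfter.Format(time.RFC3339), time.Now().After(c.NotAfter))
    		}
    	}

    	// Check the directory the NetworkReady=false condition points at.
    	entries, err := os.ReadDir("/etc/kubernetes/cni/net.d")
    	if err != nil {
    		fmt.Fprintf(os.Stderr, "CNI conf dir: %v\n", err)
    		return
    	}
    	if len(entries) == 0 {
    		fmt.Println("no CNI configuration files: NetworkReady stays false until the network plugin writes one")
    	}
    	for _, e := range entries {
    		fmt.Println("CNI conf file:", e.Name())
    	}
    }

Against the certificate reported in these records it would print notAfter=2025-08-24T17:21:41Z expired=true, matching the x509 verification error in every failed webhook call; an empty directory listing matches the NetworkPluginNotReady condition.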
Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.199714 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.206462 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster
-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.208917 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"cru
n\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.215428 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.215483 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.215496 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.215517 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.215529 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.217574 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.224483 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 
2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.227143 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.227174 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.227185 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.227201 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.227212 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.232145 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead"} Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.237865 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 
2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.241334 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.241678 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.241725 
4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.241738 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.241770 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.241783 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.249941 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 
2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.250061 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.251286 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.251313 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.251323 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.251336 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.251346 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.251477 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.260350 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.269576 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.278241 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.287080 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.296880 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.307195 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:26:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.354068 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.354109 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.354120 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.354149 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.354161 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.456591 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.456655 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.456667 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.456685 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.456695 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.559477 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.559503 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.559514 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.559527 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.559537 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.661411 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.661448 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.661460 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.661476 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.661485 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.710085 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.710180 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.710225 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.710277 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:02.710248595 +0000 UTC m=+24.946978862 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.710312 4652 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.710364 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:02.710352601 +0000 UTC m=+24.947082867 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.710396 4652 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.710433 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:02.710425648 +0000 UTC m=+24.947155915 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.763492 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.763617 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.763685 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.763757 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.763813 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.810731 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.810840 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.810995 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.811142 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.811221 4652 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.811112 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.811321 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.811339 4652 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.811376 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:02.811366076 +0000 UTC m=+25.048096344 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:58 crc kubenswrapper[4652]: E1205 05:26:58.811456 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:02.811444564 +0000 UTC m=+25.048174830 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.865915 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.865946 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.865956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.865972 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.865983 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.968286 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.968356 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.968367 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.968383 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:58 crc kubenswrapper[4652]: I1205 05:26:58.968395 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:58Z","lastTransitionTime":"2025-12-05T05:26:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.071124 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.071173 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.071182 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.071194 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.071204 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.125006 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.125096 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.125047 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:26:59 crc kubenswrapper[4652]: E1205 05:26:59.125335 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:26:59 crc kubenswrapper[4652]: E1205 05:26:59.125447 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:26:59 crc kubenswrapper[4652]: E1205 05:26:59.125608 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.173508 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.173616 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.173676 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.173752 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.173820 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.276374 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.276404 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.276415 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.276428 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.276439 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.378202 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.378292 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.378358 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.378422 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.378481 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.480950 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.481002 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.481017 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.481035 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.481052 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.583092 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.583140 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.583152 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.583165 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.583174 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.684872 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.684908 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.684919 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.684929 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.684942 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.787158 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.787198 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.787209 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.787223 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.787232 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.889547 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.889603 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.889612 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.889624 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.889633 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.991993 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.992053 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.992070 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.992095 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:26:59 crc kubenswrapper[4652]: I1205 05:26:59.992108 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:26:59Z","lastTransitionTime":"2025-12-05T05:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.094847 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.094885 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.094893 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.094922 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.094933 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.196941 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.196980 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.196991 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.197022 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.197033 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.299406 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.299454 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.299463 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.299486 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.299499 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.401751 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.401795 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.401804 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.401822 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.401834 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.503725 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.503783 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.503797 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.503811 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.503820 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.606297 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.606358 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.606366 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.606383 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.606394 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.708239 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.708274 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.708284 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.708297 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.708305 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.810223 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.810246 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.810255 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.810266 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.810273 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.912009 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.912047 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.912058 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.912074 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:00 crc kubenswrapper[4652]: I1205 05:27:00.912084 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:00Z","lastTransitionTime":"2025-12-05T05:27:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.014637 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.014669 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.014678 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.014689 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.014697 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.117290 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.117327 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.117336 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.117350 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.117359 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.125714 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.125752 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:01 crc kubenswrapper[4652]: E1205 05:27:01.125825 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.125753 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:01 crc kubenswrapper[4652]: E1205 05:27:01.125957 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:01 crc kubenswrapper[4652]: E1205 05:27:01.126090 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.219375 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.219420 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.219432 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.219452 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.219466 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.321894 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.321932 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.321963 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.321981 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.321989 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.424455 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.424491 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.424503 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.424518 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.424528 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.526798 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.526858 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.526872 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.526887 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.526899 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.629133 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.629171 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.629180 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.629196 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.629209 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.731176 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.731223 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.731236 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.731253 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.731264 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.834030 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.834071 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.834083 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.834100 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.834111 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.936364 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.936416 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.936428 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.936458 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:01 crc kubenswrapper[4652]: I1205 05:27:01.936473 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:01Z","lastTransitionTime":"2025-12-05T05:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.038213 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.038251 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.038262 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.038310 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.038320 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.140538 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.140593 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.140604 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.140620 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.140629 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.221502 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-nfbsv"] Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.222047 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-s4t24"] Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.222268 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-nfbsv" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.222537 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.224599 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.224724 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.224725 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.224830 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.225270 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.225366 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.225403 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.226791 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.237167 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0331197d-08f0-4dec-8d8a-72e6019bd2eb-rootfs\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.237209 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrxgt\" (UniqueName: \"kubernetes.io/projected/0331197d-08f0-4dec-8d8a-72e6019bd2eb-kube-api-access-rrxgt\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.237257 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxjm9\" (UniqueName: \"kubernetes.io/projected/c29b6caf-4921-4f3c-a3b7-31abdcba038a-kube-api-access-pxjm9\") pod \"node-resolver-nfbsv\" (UID: \"c29b6caf-4921-4f3c-a3b7-31abdcba038a\") " pod="openshift-dns/node-resolver-nfbsv" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 
05:27:02.237309 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c29b6caf-4921-4f3c-a3b7-31abdcba038a-hosts-file\") pod \"node-resolver-nfbsv\" (UID: \"c29b6caf-4921-4f3c-a3b7-31abdcba038a\") " pod="openshift-dns/node-resolver-nfbsv" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.237350 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0331197d-08f0-4dec-8d8a-72e6019bd2eb-proxy-tls\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.237371 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0331197d-08f0-4dec-8d8a-72e6019bd2eb-mcd-auth-proxy-config\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.238795 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.242744 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.242789 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.242803 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.242823 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.242838 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.250361 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.259955 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.270934 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.281812 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.298799 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.308100 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.316999 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.329579 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.337820 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c29b6caf-4921-4f3c-a3b7-31abdcba038a-hosts-file\") pod \"node-resolver-nfbsv\" (UID: \"c29b6caf-4921-4f3c-a3b7-31abdcba038a\") " pod="openshift-dns/node-resolver-nfbsv" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.337864 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0331197d-08f0-4dec-8d8a-72e6019bd2eb-proxy-tls\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.337883 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0331197d-08f0-4dec-8d8a-72e6019bd2eb-mcd-auth-proxy-config\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.337901 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0331197d-08f0-4dec-8d8a-72e6019bd2eb-rootfs\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.337918 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrxgt\" (UniqueName: \"kubernetes.io/projected/0331197d-08f0-4dec-8d8a-72e6019bd2eb-kube-api-access-rrxgt\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.337941 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxjm9\" (UniqueName: \"kubernetes.io/projected/c29b6caf-4921-4f3c-a3b7-31abdcba038a-kube-api-access-pxjm9\") pod \"node-resolver-nfbsv\" (UID: \"c29b6caf-4921-4f3c-a3b7-31abdcba038a\") " pod="openshift-dns/node-resolver-nfbsv" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.337958 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c29b6caf-4921-4f3c-a3b7-31abdcba038a-hosts-file\") pod \"node-resolver-nfbsv\" (UID: \"c29b6caf-4921-4f3c-a3b7-31abdcba038a\") " pod="openshift-dns/node-resolver-nfbsv" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.338105 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0331197d-08f0-4dec-8d8a-72e6019bd2eb-rootfs\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.338682 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0331197d-08f0-4dec-8d8a-72e6019bd2eb-mcd-auth-proxy-config\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.339803 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.342201 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0331197d-08f0-4dec-8d8a-72e6019bd2eb-proxy-tls\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.345105 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.345139 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.345150 4652 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.345166 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.345176 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.348760 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\"
,\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.351655 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxjm9\" (UniqueName: \"kubernetes.io/projected/c29b6caf-4921-4f3c-a3b7-31abdcba038a-kube-api-access-pxjm9\") pod \"node-resolver-nfbsv\" (UID: \"c29b6caf-4921-4f3c-a3b7-31abdcba038a\") " pod="openshift-dns/node-resolver-nfbsv" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.351795 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrxgt\" (UniqueName: \"kubernetes.io/projected/0331197d-08f0-4dec-8d8a-72e6019bd2eb-kube-api-access-rrxgt\") pod \"machine-config-daemon-s4t24\" (UID: \"0331197d-08f0-4dec-8d8a-72e6019bd2eb\") " pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.358260 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.366075 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.378533 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.390704 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.407408 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.419839 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.442214 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.447135 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.447230 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.447302 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.447363 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.447412 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.455251 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.533761 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.538779 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-nfbsv" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.550293 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.550325 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.550334 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.550350 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.550359 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: W1205 05:27:02.550585 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc29b6caf_4921_4f3c_a3b7_31abdcba038a.slice/crio-c890d73295cbb6312bb3098b6ed35379a7e0cddf44d76b8b59c4c1d0c83e25f7 WatchSource:0}: Error finding container c890d73295cbb6312bb3098b6ed35379a7e0cddf44d76b8b59c4c1d0c83e25f7: Status 404 returned error can't find the container with id c890d73295cbb6312bb3098b6ed35379a7e0cddf44d76b8b59c4c1d0c83e25f7 Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.604662 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-569vn"] Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.605443 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-tfrqf"] Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.605770 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.606469 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.606491 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-94kb9"] Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.608432 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.608435 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.608572 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.608745 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.608891 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.608992 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.609033 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.609791 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.612966 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.613092 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.613188 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.613257 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.613371 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.613476 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.613519 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.621081 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.633076 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.640926 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-socket-dir-parent\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.640963 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-hostroot\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.640984 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-netd\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641003 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-systemd\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641018 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-bin\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641034 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvnhn\" (UniqueName: \"kubernetes.io/projected/ab3e4ec7-1775-48b7-8848-a578578629df-kube-api-access-nvnhn\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641049 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-kubelet\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641066 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-ovn-kubernetes\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641081 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-env-overrides\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641143 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhbnq\" (UniqueName: \"kubernetes.io/projected/39d571ff-6054-4804-b819-bcee09f6ed35-kube-api-access-hhbnq\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641169 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ab3e4ec7-1775-48b7-8848-a578578629df-ovn-node-metrics-cert\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641189 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-k8s-cni-cncf-io\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 
05:27:02.641216 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/57ea6288-d271-498d-ad7e-aa90f3d433e4-cni-binary-copy\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641239 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-os-release\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641253 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-conf-dir\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641287 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-systemd-units\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641312 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-var-lib-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641332 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-daemon-config\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641356 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-etc-kubernetes\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641381 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-cnibin\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641395 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-multus-certs\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 
05:27:02.641412 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/39d571ff-6054-4804-b819-bcee09f6ed35-cni-binary-copy\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641427 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-tuning-conf-dir\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641443 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-netns\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641460 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg2ht\" (UniqueName: \"kubernetes.io/projected/57ea6288-d271-498d-ad7e-aa90f3d433e4-kube-api-access-xg2ht\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641477 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641493 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-config\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641506 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-script-lib\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641520 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-os-release\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641535 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-cni-bin\") pod 
\"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641578 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641593 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-etc-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641610 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-ovn\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641627 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-netns\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641645 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-kubelet\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641663 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-cnibin\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641678 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-log-socket\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641692 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-system-cni-dir\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641705 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-cni-dir\") pod 
\"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641728 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-system-cni-dir\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641782 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-slash\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641814 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-node-log\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641830 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-cni-multus\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.641855 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/39d571ff-6054-4804-b819-bcee09f6ed35-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.643461 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.655402 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.655858 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.655897 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.655907 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.655922 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.655933 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.664679 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.674140 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.690426 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.711234 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.730838 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.740613 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742166 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742275 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-bin\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742302 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvnhn\" (UniqueName: \"kubernetes.io/projected/ab3e4ec7-1775-48b7-8848-a578578629df-kube-api-access-nvnhn\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742321 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-systemd\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742338 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-ovn-kubernetes\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742359 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-bin\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.742372 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:10.742354079 +0000 UTC m=+32.979084346 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742387 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-ovn-kubernetes\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742398 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-env-overrides\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742424 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-systemd\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742426 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-kubelet\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742449 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-kubelet\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742452 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ab3e4ec7-1775-48b7-8848-a578578629df-ovn-node-metrics-cert\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742482 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-k8s-cni-cncf-io\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742506 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhbnq\" (UniqueName: \"kubernetes.io/projected/39d571ff-6054-4804-b819-bcee09f6ed35-kube-api-access-hhbnq\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742526 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/57ea6288-d271-498d-ad7e-aa90f3d433e4-cni-binary-copy\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742569 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-os-release\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742590 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-conf-dir\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742611 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742637 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-daemon-config\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742650 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-systemd-units\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742664 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-var-lib-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742686 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-etc-kubernetes\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742702 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-tuning-conf-dir\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742717 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-netns\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742731 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-cnibin\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742746 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-multus-certs\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742763 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/39d571ff-6054-4804-b819-bcee09f6ed35-cni-binary-copy\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742779 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742809 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-config\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742826 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg2ht\" (UniqueName: \"kubernetes.io/projected/57ea6288-d271-498d-ad7e-aa90f3d433e4-kube-api-access-xg2ht\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742842 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742858 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-script-lib\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742872 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-os-release\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742885 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-cni-bin\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742902 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742917 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-kubelet\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742934 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-etc-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742949 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-ovn\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742964 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-netns\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742987 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-system-cni-dir\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743003 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-cnibin\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743017 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-log-socket\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743022 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-env-overrides\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743031 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-system-cni-dir\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743053 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-cni-dir\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743064 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-conf-dir\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743071 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/39d571ff-6054-4804-b819-bcee09f6ed35-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: 
\"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743089 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-slash\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743104 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-node-log\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743118 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-cni-multus\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743146 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-netd\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743161 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-socket-dir-parent\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743175 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-hostroot\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743224 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-hostroot\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.743263 4652 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.743332 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:10.74331221 +0000 UTC m=+32.980042477 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743360 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-systemd-units\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743368 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-var-lib-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743501 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-tuning-conf-dir\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743533 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/57ea6288-d271-498d-ad7e-aa90f3d433e4-cni-binary-copy\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742572 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-k8s-cni-cncf-io\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743597 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-os-release\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743597 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-etc-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743616 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-netns\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743645 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-openvswitch\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743722 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-system-cni-dir\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743766 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-ovn\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743772 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-cnibin\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743801 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-netns\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743804 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-system-cni-dir\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743824 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-run-multus-certs\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743834 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/39d571ff-6054-4804-b819-bcee09f6ed35-cnibin\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743861 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-log-socket\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.743889 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-node-log\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: 
I1205 05:27:02.743942 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-cni-dir\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744009 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-daemon-config\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744057 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-slash\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744209 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-script-lib\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744266 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-os-release\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744292 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-cni-bin\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.744344 4652 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.744382 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:10.744371972 +0000 UTC m=+32.981102240 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744395 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/39d571ff-6054-4804-b819-bcee09f6ed35-cni-binary-copy\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.742947 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-etc-kubernetes\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744412 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-kubelet\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744433 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-netd\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744461 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744464 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-multus-socket-dir-parent\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744480 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/57ea6288-d271-498d-ad7e-aa90f3d433e4-host-var-lib-cni-multus\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744530 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/39d571ff-6054-4804-b819-bcee09f6ed35-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.744550 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-config\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.745573 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ab3e4ec7-1775-48b7-8848-a578578629df-ovn-node-metrics-cert\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.750980 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.757362 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.757396 4652 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.757408 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.757435 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.757447 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.757655 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhbnq\" (UniqueName: \"kubernetes.io/projected/39d571ff-6054-4804-b819-bcee09f6ed35-kube-api-access-hhbnq\") pod \"multus-additional-cni-plugins-tfrqf\" (UID: \"39d571ff-6054-4804-b819-bcee09f6ed35\") " pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.758734 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg2ht\" (UniqueName: \"kubernetes.io/projected/57ea6288-d271-498d-ad7e-aa90f3d433e4-kube-api-access-xg2ht\") pod \"multus-569vn\" (UID: \"57ea6288-d271-498d-ad7e-aa90f3d433e4\") " pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.759090 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvnhn\" (UniqueName: \"kubernetes.io/projected/ab3e4ec7-1775-48b7-8848-a578578629df-kube-api-access-nvnhn\") pod \"ovnkube-node-94kb9\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.761634 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.770728 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.779838 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.789204 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.798585 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.817365 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: 
I1205 05:27:02.827673 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.836292 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.843711 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.843768 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.843892 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.843922 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.843930 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.843939 4652 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.843949 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.843963 4652 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.843987 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:10.843974044 +0000 UTC m=+33.080704312 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:02 crc kubenswrapper[4652]: E1205 05:27:02.844022 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:10.84400913 +0000 UTC m=+33.080739397 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.845503 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.857072 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.859910 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" 
Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.859946 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.859958 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.859975 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.859986 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.866852 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.875984 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.886480 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:02Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.919678 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-569vn" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.929732 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" Dec 05 05:27:02 crc kubenswrapper[4652]: W1205 05:27:02.930931 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57ea6288_d271_498d_ad7e_aa90f3d433e4.slice/crio-c388526da91191c1131e7f2fb3844eb5cfee3df5408c929428d9374e2884af37 WatchSource:0}: Error finding container c388526da91191c1131e7f2fb3844eb5cfee3df5408c929428d9374e2884af37: Status 404 returned error can't find the container with id c388526da91191c1131e7f2fb3844eb5cfee3df5408c929428d9374e2884af37 Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.933543 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:02 crc kubenswrapper[4652]: W1205 05:27:02.946714 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39d571ff_6054_4804_b819_bcee09f6ed35.slice/crio-10897df7d2e721b3e6683fc5247432673238286d9a7166b263dbafd58c489862 WatchSource:0}: Error finding container 10897df7d2e721b3e6683fc5247432673238286d9a7166b263dbafd58c489862: Status 404 returned error can't find the container with id 10897df7d2e721b3e6683fc5247432673238286d9a7166b263dbafd58c489862 Dec 05 05:27:02 crc kubenswrapper[4652]: W1205 05:27:02.953329 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab3e4ec7_1775_48b7_8848_a578578629df.slice/crio-45151eb2c12f06db353156ae9ada8e149602f0efe070c7256168fc8f9b25b890 WatchSource:0}: Error finding container 45151eb2c12f06db353156ae9ada8e149602f0efe070c7256168fc8f9b25b890: Status 404 returned error can't find the container with id 45151eb2c12f06db353156ae9ada8e149602f0efe070c7256168fc8f9b25b890 Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.963596 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.963640 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.963651 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.963673 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:02 crc kubenswrapper[4652]: I1205 05:27:02.963687 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:02Z","lastTransitionTime":"2025-12-05T05:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.066670 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.066718 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.066733 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.066758 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.066771 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.125464 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.125513 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.125592 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:03 crc kubenswrapper[4652]: E1205 05:27:03.125642 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:03 crc kubenswrapper[4652]: E1205 05:27:03.125783 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:03 crc kubenswrapper[4652]: E1205 05:27:03.126001 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.168941 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.168968 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.168979 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.168990 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.168999 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.246024 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.246096 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.246110 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"36fd73a2bae289a606b4b553402582b5f0233aba00e6d8539b1801a9dcd5a7fb"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.247298 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae" exitCode=0 Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.247364 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.247399 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"45151eb2c12f06db353156ae9ada8e149602f0efe070c7256168fc8f9b25b890"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.249345 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-569vn" event={"ID":"57ea6288-d271-498d-ad7e-aa90f3d433e4","Type":"ContainerStarted","Data":"33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3"} Dec 05 05:27:03 crc 
kubenswrapper[4652]: I1205 05:27:03.249382 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-569vn" event={"ID":"57ea6288-d271-498d-ad7e-aa90f3d433e4","Type":"ContainerStarted","Data":"c388526da91191c1131e7f2fb3844eb5cfee3df5408c929428d9374e2884af37"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.251158 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-nfbsv" event={"ID":"c29b6caf-4921-4f3c-a3b7-31abdcba038a","Type":"ContainerStarted","Data":"7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.251181 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-nfbsv" event={"ID":"c29b6caf-4921-4f3c-a3b7-31abdcba038a","Type":"ContainerStarted","Data":"c890d73295cbb6312bb3098b6ed35379a7e0cddf44d76b8b59c4c1d0c83e25f7"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.253042 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" event={"ID":"39d571ff-6054-4804-b819-bcee09f6ed35","Type":"ContainerStarted","Data":"10897df7d2e721b3e6683fc5247432673238286d9a7166b263dbafd58c489862"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.257471 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc 
kubenswrapper[4652]: I1205 05:27:03.269962 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.271482 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.271523 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.271533 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.271548 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.271574 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.283000 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.293465 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.304979 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.315585 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.325512 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.335408 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.362017 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.370076 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is 
after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.373784 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.373815 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.373826 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.373842 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.373856 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.385107 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.394676 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.403980 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.410891 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.440020 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.457694 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.468701 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.476096 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.476135 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.476145 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.476158 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.476167 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.495272 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.503919 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.515184 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.523730 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.533453 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.544727 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.560842 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.570376 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.578654 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.578682 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.578692 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.578706 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.578716 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.580912 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:03Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.681406 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.681612 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.681623 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.681637 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.681648 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.783690 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.783732 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.783743 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.783761 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.783772 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.885609 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.885986 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.886003 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.886029 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.886044 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.988764 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.988809 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.988819 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.988837 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:03 crc kubenswrapper[4652]: I1205 05:27:03.988848 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:03Z","lastTransitionTime":"2025-12-05T05:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.047635 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-5cpl7"] Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.048050 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.049697 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.049860 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.050210 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.050644 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.062820 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.072541 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.086611 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.091157 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.091199 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.091213 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.091234 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.091247 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.098157 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.108104 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.116669 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.126704 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.136589 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.146678 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.158142 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmcmr\" (UniqueName: \"kubernetes.io/projected/f611565f-a3f7-4dec-98c8-cc6c022ec406-kube-api-access-tmcmr\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.158360 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f611565f-a3f7-4dec-98c8-cc6c022ec406-serviceca\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.158395 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f611565f-a3f7-4dec-98c8-cc6c022ec406-host\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.158799 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.166496 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc 
kubenswrapper[4652]: I1205 05:27:04.175485 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.183945 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.191414 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.192956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.192994 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.193007 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.193021 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.193035 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.257764 4652 generic.go:334] "Generic (PLEG): container finished" podID="39d571ff-6054-4804-b819-bcee09f6ed35" containerID="1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c" exitCode=0 Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.257880 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" event={"ID":"39d571ff-6054-4804-b819-bcee09f6ed35","Type":"ContainerDied","Data":"1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.258817 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f611565f-a3f7-4dec-98c8-cc6c022ec406-serviceca\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.258870 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f611565f-a3f7-4dec-98c8-cc6c022ec406-host\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.258927 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmcmr\" (UniqueName: \"kubernetes.io/projected/f611565f-a3f7-4dec-98c8-cc6c022ec406-kube-api-access-tmcmr\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.259007 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f611565f-a3f7-4dec-98c8-cc6c022ec406-host\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.261117 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f611565f-a3f7-4dec-98c8-cc6c022ec406-serviceca\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.262658 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.262702 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.262717 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.262728 4652 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.262739 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.262752 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.269958 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.279421 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.281479 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmcmr\" (UniqueName: \"kubernetes.io/projected/f611565f-a3f7-4dec-98c8-cc6c022ec406-kube-api-access-tmcmr\") pod \"node-ca-5cpl7\" (UID: \"f611565f-a3f7-4dec-98c8-cc6c022ec406\") " pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.294841 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.294880 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.294896 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.294912 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.294927 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.311366 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.351340 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.360247 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-5cpl7" Dec 05 05:27:04 crc kubenswrapper[4652]: W1205 05:27:04.372899 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf611565f_a3f7_4dec_98c8_cc6c022ec406.slice/crio-bc188887d63837bdaf1f707d583a21b215aaa55f13cea153101852b793057e1b WatchSource:0}: Error finding container bc188887d63837bdaf1f707d583a21b215aaa55f13cea153101852b793057e1b: Status 404 returned error can't find the container with id bc188887d63837bdaf1f707d583a21b215aaa55f13cea153101852b793057e1b Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.391085 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac11
7eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.398025 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.398055 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.398065 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.398082 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.398096 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.430406 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.471518 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc 
kubenswrapper[4652]: I1205 05:27:04.500947 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.500976 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.500986 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.501001 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.501016 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.510046 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.552006 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.592970 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.603536 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.603625 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.603660 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.603684 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.603697 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.629169 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.672761 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.705911 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.705951 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.705966 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.705984 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.705995 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.711203 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.755999 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.809035 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.809070 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.809081 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.809099 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.809113 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.911722 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.911983 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.911994 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.912015 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:04 crc kubenswrapper[4652]: I1205 05:27:04.912025 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:04Z","lastTransitionTime":"2025-12-05T05:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.014028 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.014060 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.014069 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.014083 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.014093 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.116525 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.116580 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.116590 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.116609 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.116620 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.124990 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.124990 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.124994 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:05 crc kubenswrapper[4652]: E1205 05:27:05.125201 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:05 crc kubenswrapper[4652]: E1205 05:27:05.125095 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:05 crc kubenswrapper[4652]: E1205 05:27:05.125335 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.218097 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.218139 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.218150 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.218165 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.218177 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.268507 4652 generic.go:334] "Generic (PLEG): container finished" podID="39d571ff-6054-4804-b819-bcee09f6ed35" containerID="3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea" exitCode=0 Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.268610 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" event={"ID":"39d571ff-6054-4804-b819-bcee09f6ed35","Type":"ContainerDied","Data":"3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.270001 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-5cpl7" event={"ID":"f611565f-a3f7-4dec-98c8-cc6c022ec406","Type":"ContainerStarted","Data":"abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.270028 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-5cpl7" event={"ID":"f611565f-a3f7-4dec-98c8-cc6c022ec406","Type":"ContainerStarted","Data":"bc188887d63837bdaf1f707d583a21b215aaa55f13cea153101852b793057e1b"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.280479 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.290052 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.304657 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.316771 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.319977 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.320007 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.320018 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.320031 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.320042 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.326930 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.335658 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.345206 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.373205 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.392654 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.415175 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.422132 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.422166 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.422176 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.422190 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.422200 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.422952 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.433674 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.442914 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.449818 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.459308 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.467911 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.478097 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.489040 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.512636 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.524262 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.524301 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.524312 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.524327 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.524336 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.549724 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.592282 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.631149 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.631197 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.631209 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.631229 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.631241 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.631820 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.669487 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.713373 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.733035 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.733068 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.733077 4652 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.733092 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.733105 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.752253 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.791476 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.835201 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.835432 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.835445 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.835467 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.835486 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.837938 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d
9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.872422 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:05Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.937540 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.937604 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.937615 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.937631 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:05 crc kubenswrapper[4652]: I1205 05:27:05.937645 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:05Z","lastTransitionTime":"2025-12-05T05:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.040099 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.040149 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.040161 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.040180 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.040192 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.142081 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.142140 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.142151 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.142171 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.142184 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.244710 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.244749 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.244759 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.244778 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.244791 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.276906 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db"} Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.278975 4652 generic.go:334] "Generic (PLEG): container finished" podID="39d571ff-6054-4804-b819-bcee09f6ed35" containerID="1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b" exitCode=0 Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.279012 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" event={"ID":"39d571ff-6054-4804-b819-bcee09f6ed35","Type":"ContainerDied","Data":"1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b"} Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.291507 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.304550 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.319250 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.329816 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.341101 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.346442 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.346475 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.346485 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.346499 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.346511 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.351533 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.359380 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.370822 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.380304 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.394116 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.406067 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.416098 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.424931 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.432902 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T05:27:06Z is after 2025-08-24T17:21:41Z"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.448883 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.448917 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.448928 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.448945 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.448957 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.550828 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.550964 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.551059 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.551158 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.551250 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.653115 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.653295 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.653306 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.653334 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.653347 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.755401 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.755437 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.755449 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.755463 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.755480 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.858164 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.858195 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.858206 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.858223 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.858234 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.960228 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.960304 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.960314 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.960328 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:06 crc kubenswrapper[4652]: I1205 05:27:06.960339 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:06Z","lastTransitionTime":"2025-12-05T05:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.062667 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.062710 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.062720 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.062768 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.062787 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.125698 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.125808 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:27:07 crc kubenswrapper[4652]: E1205 05:27:07.125815 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 05:27:07 crc kubenswrapper[4652]: E1205 05:27:07.125991 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.125704 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:27:07 crc kubenswrapper[4652]: E1205 05:27:07.126138 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.164177 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.164207 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.164216 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.164231 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.164243 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.266187 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.266227 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.266238 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.266251 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.266263 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.286679 4652 generic.go:334] "Generic (PLEG): container finished" podID="39d571ff-6054-4804-b819-bcee09f6ed35" containerID="097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d" exitCode=0
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.286725 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" event={"ID":"39d571ff-6054-4804-b819-bcee09f6ed35","Type":"ContainerDied","Data":"097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d"}
Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.297504 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.312806 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.323751 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.334206 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.343858 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.356444 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.367541 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.369093 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.369137 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.369147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.369161 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.369170 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.379209 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.387908 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.398531 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"
image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.408111 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.416828 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.427620 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.437692 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.471818 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.471845 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.471855 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.471869 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.471884 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.575980 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.576088 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.576167 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.576237 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.576293 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.678270 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.678307 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.678318 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.678336 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.678349 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.780461 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.780590 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.780670 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.780746 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.780807 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.883013 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.883108 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.883181 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.883259 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.883335 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.985988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.986038 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.986051 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.986063 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:07 crc kubenswrapper[4652]: I1205 05:27:07.986071 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:07Z","lastTransitionTime":"2025-12-05T05:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.089926 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.090033 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.090313 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.090349 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.090364 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.137287 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.147948 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.157250 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.167824 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.178811 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.191872 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.191946 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.191969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.191988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.192003 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.193293 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.204995 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.216146 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.227254 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.237435 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.248135 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.258158 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.268544 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.281273 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.292539 4652 generic.go:334] "Generic (PLEG): container finished" podID="39d571ff-6054-4804-b819-bcee09f6ed35" containerID="9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94" exitCode=0 Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.292629 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" event={"ID":"39d571ff-6054-4804-b819-bcee09f6ed35","Type":"ContainerDied","Data":"9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.294040 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.294082 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.294094 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.294109 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.294131 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.299207 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.299870 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.299904 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.303802 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.316733 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.328145 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.332113 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.332525 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.341899 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.350792 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.361525 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.374086 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.384475 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.396810 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.397493 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.397524 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.397578 4652 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.397604 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.397672 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.401501 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.401534 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.401546 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.401578 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.401592 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.408446 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: E1205 05:27:08.412934 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.416848 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.416875 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.416886 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.416899 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.416910 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.418108 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: E1205 05:27:08.427405 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.429700 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.430289 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.430322 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.430334 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.430357 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.430368 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.437534 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: E1205 05:27:08.439390 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.441811 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.441845 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.441858 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.441872 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.441883 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.450779 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: E1205 05:27:08.450930 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.453952 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.453969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.453977 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.453987 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.453996 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.464070 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4a
e9b212e3760368a72ecfe0b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: E1205 05:27:08.465747 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed2
1\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: E1205 05:27:08.465857 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.473899 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.482014 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.489655 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.500027 4652 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.500060 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.500068 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.500084 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.500094 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.500989 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.509967 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.518289 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.527852 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.536456 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.546982 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers 
with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.557165 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.590668 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.602820 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.602848 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.602862 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.602885 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.602896 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.631488 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.669365 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.705914 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.705954 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.705963 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.705983 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.705996 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.807785 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.808145 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.808157 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.808175 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.808189 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.910080 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.910114 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.910131 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.910144 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:08 crc kubenswrapper[4652]: I1205 05:27:08.910154 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:08Z","lastTransitionTime":"2025-12-05T05:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.011740 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.011781 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.011790 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.011804 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.011813 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.114386 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.114421 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.114432 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.114449 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.114460 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.124658 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.124692 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.124664 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:09 crc kubenswrapper[4652]: E1205 05:27:09.124783 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:09 crc kubenswrapper[4652]: E1205 05:27:09.124898 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:09 crc kubenswrapper[4652]: E1205 05:27:09.124996 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.216213 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.216249 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.216261 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.216275 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.216287 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.306171 4652 generic.go:334] "Generic (PLEG): container finished" podID="39d571ff-6054-4804-b819-bcee09f6ed35" containerID="dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d" exitCode=0 Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.306237 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" event={"ID":"39d571ff-6054-4804-b819-bcee09f6ed35","Type":"ContainerDied","Data":"dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.306343 4652 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.317862 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.317900 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.317909 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.317923 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.317933 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.318656 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.328933 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.343620 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4a
e9b212e3760368a72ecfe0b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.354242 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.363791 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.373028 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.383219 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.390311 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.400187 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.412243 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.420703 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.420752 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.420766 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.420787 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.420799 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.426961 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.436384 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.446782 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.454239 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:09Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.522712 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.522751 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.522762 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.522779 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.522790 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.625921 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.625973 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.625989 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.626005 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.626017 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.728149 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.728188 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.728198 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.728211 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.728222 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.830550 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.830608 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.830617 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.830633 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.830642 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.934059 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.934093 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.934104 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.934117 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:09 crc kubenswrapper[4652]: I1205 05:27:09.934135 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:09Z","lastTransitionTime":"2025-12-05T05:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.036549 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.036620 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.036634 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.036657 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.036673 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.138538 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.138584 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.138595 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.138608 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.138621 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.240988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.241034 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.241046 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.241068 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.241081 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.311536 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/0.log" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.314707 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8" exitCode=1 Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.314746 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.315299 4652 scope.go:117] "RemoveContainer" containerID="5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.318728 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" event={"ID":"39d571ff-6054-4804-b819-bcee09f6ed35","Type":"ContainerStarted","Data":"19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.328882 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d
773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.338222 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.342506 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.342534 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.342544 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.342572 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.342584 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.347405 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.356912 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.365000 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.378652 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"message\\\":\\\"86 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:27:10.212214 5986 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 05:27:10.212659 5986 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:27:10.212729 5986 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213067 5986 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213668 5986 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 05:27:10.213692 5986 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:27:10.213739 5986 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213764 5986 factory.go:656] Stopping watch factory\\\\nI1205 05:27:10.213779 5986 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 05:27:10.213794 5986 reflector.go:311] Stopping reflector *v1.Node (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.404530 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.421081 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.444721 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.444745 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.444756 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.444775 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.444790 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.445296 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.461507 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.475502 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.484810 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.505222 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.513049 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.521941 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.530028 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.543070 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"message\\\":\\\"86 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:27:10.212214 5986 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 05:27:10.212659 5986 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:27:10.212729 5986 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213067 5986 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213668 5986 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 05:27:10.213692 5986 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:27:10.213739 5986 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213764 5986 factory.go:656] Stopping watch factory\\\\nI1205 05:27:10.213779 5986 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 05:27:10.213794 5986 reflector.go:311] Stopping reflector *v1.Node (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.547748 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.547798 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.547815 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.547843 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.547857 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.554326 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.564378 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.579168 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.592414 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.600740 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.617151 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.632878 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.644097 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.650585 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.650619 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.650637 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.650653 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.650663 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.653978 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.673470 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.682420 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.752968 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.753021 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.753034 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.753055 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.753071 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.820527 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.820704 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.820780 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.820860 4652 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.820905 4652 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.820863 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:26.82083329 +0000 UTC m=+49.057563557 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.821035 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:26.821023247 +0000 UTC m=+49.057753513 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.821061 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:26.821052201 +0000 UTC m=+49.057782469 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.855244 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.855286 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.855299 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.855321 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.855333 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.922732 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.922806 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.923004 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.923060 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.923076 4652 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.923164 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:26.923139395 +0000 UTC m=+49.159869662 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.923238 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.923273 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.923290 4652 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:10 crc kubenswrapper[4652]: E1205 05:27:10.923516 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:26.923491286 +0000 UTC m=+49.160221554 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.958711 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.958754 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.958766 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.958786 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.958803 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:10Z","lastTransitionTime":"2025-12-05T05:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:10 crc kubenswrapper[4652]: I1205 05:27:10.996759 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.005372 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.016075 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.025348 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.034854 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.045012 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.054173 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.061785 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.061821 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.061834 4652 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.061851 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.061862 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.063940 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.071114 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.080697 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.089043 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.102681 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"message\\\":\\\"86 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:27:10.212214 5986 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 05:27:10.212659 5986 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:27:10.212729 5986 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213067 5986 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213668 5986 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 05:27:10.213692 5986 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:27:10.213739 5986 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213764 5986 factory.go:656] Stopping watch factory\\\\nI1205 05:27:10.213779 5986 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 05:27:10.213794 5986 reflector.go:311] Stopping reflector *v1.Node (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.113437 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.123372 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.125105 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.125135 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.125105 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:11 crc kubenswrapper[4652]: E1205 05:27:11.125231 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:11 crc kubenswrapper[4652]: E1205 05:27:11.125315 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:11 crc kubenswrapper[4652]: E1205 05:27:11.125372 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.133080 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.164580 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.164611 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.164621 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.164633 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.164645 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.267579 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.267631 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.267643 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.267665 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.267693 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.328350 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/1.log" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.329103 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/0.log" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.331646 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57" exitCode=1 Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.331741 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.331833 4652 scope.go:117] "RemoveContainer" containerID="5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.332218 4652 scope.go:117] "RemoveContainer" containerID="f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57" Dec 05 05:27:11 crc kubenswrapper[4652]: E1205 05:27:11.332368 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.343743 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.354359 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.362834 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.369711 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.369741 4652 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.369751 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.369767 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.369779 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.371451 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.380159 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.389449 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.396576 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.405430 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.413279 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.420059 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.429976 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.439245 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.448071 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.472066 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.472102 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.472113 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.472134 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.472146 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.475743 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287
d5ce626fd2a981b872704e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fdf93c8b1c097663a9d275a197574deb231af4ae9b212e3760368a72ecfe0b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"message\\\":\\\"86 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:27:10.212214 5986 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 05:27:10.212659 5986 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:27:10.212729 5986 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213067 5986 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213668 5986 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 05:27:10.213692 5986 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:27:10.213739 5986 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:27:10.213764 5986 factory.go:656] Stopping watch factory\\\\nI1205 05:27:10.213779 5986 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 05:27:10.213794 5986 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:11Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb\\\\nI1205 05:27:11.023044 6137 services_controller.go:451] Built service openshift-operator-lifecycle-manager/packageserver-service cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.153\\\\\\\", Port:5443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 05:27:11.022984 6137 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1205 05:27:11.023065 6137 obj_retry.go:386] Retry successful for *v1.Pod 
openshift-multus/multus-additional-cni-plugins-tfrqf after 0 failed attempt(s)\\\\nI1205 05:27:11.023081 6137 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-tfrqf\\\\nI1205 05:27:11.023009 6137 obj_retry.go:386] Retry su\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.1
1\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.575652 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.575828 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.575895 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.575963 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.576014 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.677910 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.677948 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.677959 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.677976 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.677987 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.780850 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.780886 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.780896 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.780917 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.780930 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.883318 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.883362 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.883371 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.883385 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.883394 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.985444 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.985479 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.985492 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.985508 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:11 crc kubenswrapper[4652]: I1205 05:27:11.985517 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:11Z","lastTransitionTime":"2025-12-05T05:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.086924 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.086957 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.086966 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.086979 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.086989 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.189217 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.189257 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.189268 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.189281 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.189292 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.291581 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.291624 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.291639 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.291655 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.291664 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.335622 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/1.log" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.339020 4652 scope.go:117] "RemoveContainer" containerID="f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57" Dec 05 05:27:12 crc kubenswrapper[4652]: E1205 05:27:12.339236 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.349284 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.358304 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.372185 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287
d5ce626fd2a981b872704e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:11Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb\\\\nI1205 05:27:11.023044 6137 services_controller.go:451] Built service openshift-operator-lifecycle-manager/packageserver-service cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.153\\\\\\\", Port:5443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 05:27:11.022984 6137 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1205 05:27:11.023065 6137 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-tfrqf after 0 failed attempt(s)\\\\nI1205 05:27:11.023081 6137 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-tfrqf\\\\nI1205 05:27:11.023009 6137 obj_retry.go:386] Retry su\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.385233 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.393988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.394030 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.394043 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.394061 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.394072 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.394134 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.403096 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.412424 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.420875 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.430761 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.437655 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.447096 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.455379 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.463050 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
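The status-patch failures above all end the same way: the API server cannot POST to the pod.network-node-identity.openshift.io webhook because its serving certificate expired on 2025-08-24T17:21:41Z. That message is Go's standard x509 validity check, which a minimal standalone sketch can reproduce against any PEM certificate. The path below is an assumption for illustration, echoing the webhook container's /etc/webhook-cert/ mount seen in these entries:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Path is an assumption for illustration (the webhook pod mounts /etc/webhook-cert/).
	pemBytes, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now().UTC()
	// Same window the TLS handshake enforces: valid iff NotBefore <= now <= NotAfter.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate invalid: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
}

Run against the webhook's tls.crt to confirm the same NotAfter the handshake is rejecting.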
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.472222 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.496021 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.496052 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.496061 4652 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.496073 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.496084 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.598045 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.598090 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.598100 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.598113 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.598131 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.699869 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.699902 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.699914 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.699928 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.699938 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.801490 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.801527 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.801538 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.801567 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.801576 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.903094 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.903136 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.903147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.903160 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:12 crc kubenswrapper[4652]: I1205 05:27:12.903170 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:12Z","lastTransitionTime":"2025-12-05T05:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.005240 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.005279 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.005290 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.005305 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.005316 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
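The setters.go entries above rebuild the node's Ready condition on every sync. The usual condition bookkeeping, simplified below, is that lastHeartbeatTime refreshes on each call while lastTransitionTime moves only when the status value actually flips. A sketch with illustrative types, not kubelet's own:

package main

import (
	"fmt"
	"time"
)

// condition is an illustrative stand-in for v1.NodeCondition.
type condition struct {
	Type               string
	Status             string
	LastHeartbeatTime  time.Time
	LastTransitionTime time.Time
	Reason, Message    string
}

// set refreshes the heartbeat on every call but records a transition only when
// the status value actually changes.
func set(c *condition, status, reason, message string, now time.Time) {
	c.LastHeartbeatTime = now
	if c.Status != status {
		c.Status = status
		c.LastTransitionTime = now
	}
	c.Reason, c.Message = reason, message
}

func main() {
	now := time.Date(2025, 12, 5, 5, 27, 12, 0, time.UTC)
	ready := condition{Type: "Ready", Status: "True"}
	set(&ready, "False", "KubeletNotReady",
		"container runtime network not ready: NetworkReady=false", now)
	fmt.Printf("%+v\n", ready)
}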
Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.107625 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.107692 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.107701 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.107716 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.107724 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.124896 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.124920 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.124921 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:13 crc kubenswrapper[4652]: E1205 05:27:13.125007 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:13 crc kubenswrapper[4652]: E1205 05:27:13.125085 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:13 crc kubenswrapper[4652]: E1205 05:27:13.125173 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
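The NetworkReady=false condition driving the entries above clears once a CNI configuration file appears in the directory named in the message. A standalone sketch of that existence check, assuming libcni's usual .conf/.conflist/.json extensions (this is not CRI-O's actual loader):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// networkReady reports whether any CNI configuration file exists in confDir.
func networkReady(confDir string) bool {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d/" // the directory named in the log message
	fmt.Printf("NetworkReady=%v for %s\n", networkReady(dir), dir)
}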
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.209483 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.209511 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.209521 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.209531 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.209539 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.311728 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.311764 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.311774 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.311788 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.311797 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.414212 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.414253 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.414262 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.414277 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.414287 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.516639 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.516671 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.516681 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.516690 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.516699 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.618747 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.618799 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.618814 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.618833 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.618850 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.720997 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.721029 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.721037 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.721049 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.721058 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.823113 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.823150 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.823160 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.823172 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.823181 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.925160 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.925207 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.925219 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.925230 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.925239 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:13Z","lastTransitionTime":"2025-12-05T05:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:13 crc kubenswrapper[4652]: I1205 05:27:13.999469 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph"] Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.000028 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.003502 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.003633 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.010603 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
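"No sandbox for pod can be found. Need to start a new one" is the runtime manager deciding to create a fresh pod sandbox. A hedged sketch of that decision rule, with illustrative types rather than kubelet's real ones: a new sandbox is needed when none exists or the existing one is no longer usable.

package main

import "fmt"

// sandbox is an illustrative stand-in for a CRI pod sandbox status.
type sandbox struct {
	id    string
	ready bool
}

// needNewSandbox: no sandbox yet, or the existing one is not running.
func needNewSandbox(existing *sandbox) bool {
	return existing == nil || !existing.ready
}

func main() {
	pods := map[string]*sandbox{
		"openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph": nil,
		"openshift-dns/node-resolver-nfbsv":                               {id: "7751ebab", ready: true},
	}
	for pod, sb := range pods {
		if needNewSandbox(sb) {
			fmt.Printf("pod %s: no usable sandbox, starting a new one\n", pod)
		}
	}
}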
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.018189 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.027027 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.027058 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.027066 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.027077 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.027087 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.030469 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:11Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb\\\\nI1205 05:27:11.023044 6137 services_controller.go:451] Built service openshift-operator-lifecycle-manager/packageserver-service cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.153\\\\\\\", Port:5443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 05:27:11.022984 6137 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1205 05:27:11.023065 6137 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-tfrqf after 0 failed attempt(s)\\\\nI1205 05:27:11.023081 6137 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-tfrqf\\\\nI1205 05:27:11.023009 6137 obj_retry.go:386] Retry 
su\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.042789 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
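The ovnkube-controller container above sits in CrashLoopBackOff with "back-off 10s restarting failed container". Kubelet's default restart schedule starts at 10s, doubles per crash, and caps at 5m; a sketch of the resulting delays:

package main

import (
	"fmt"
	"time"
)

func main() {
	const initial = 10 * time.Second
	const maxDelay = 5 * time.Minute
	delay := initial
	for crash := 1; crash <= 7; crash++ {
		fmt.Printf("crash %d: wait %s before restarting\n", crash, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}

This prints 10s, 20s, 40s, 1m20s, 2m40s, then holds at 5m0s, matching the first back-off seen here.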
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.050441 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/043db566-ff59-4a73-845e-b36a6c80d5a9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.050594 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/043db566-ff59-4a73-845e-b36a6c80d5a9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.050687 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sbwf\" (UniqueName: \"kubernetes.io/projected/043db566-ff59-4a73-845e-b36a6c80d5a9-kube-api-access-9sbwf\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.050787 4652 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/043db566-ff59-4a73-845e-b36a6c80d5a9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.051211 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.058870 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.066135 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.074630 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.083060 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.091285 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.100271 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.107773 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.116734 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.125079 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.128360 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.128392 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.128404 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.128419 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.128431 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.132816 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.151101 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/043db566-ff59-4a73-845e-b36a6c80d5a9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.151138 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sbwf\" (UniqueName: \"kubernetes.io/projected/043db566-ff59-4a73-845e-b36a6c80d5a9-kube-api-access-9sbwf\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" 
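[Annotation] Every "Failed to update status for pod" record above fails the same way: the kubelet's status patch is intercepted by the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired at 2025-08-24T17:21:41Z while the node clock reads 2025-12-05, so each TLS handshake aborts with "x509: certificate has expired or is not yet valid". The Go sketch below reproduces the NotBefore/NotAfter validity check that the handshake performs. It is illustrative only, not part of any shipped tooling; the path /etc/webhook-cert/tls.crt is an assumption based on the webhook-cert mount at /etc/webhook-cert/ shown for the network-node-identity webhook container earlier in this log.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Assumed path: the webhook container in this log mounts its serving cert at /etc/webhook-cert/.
	data, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block in tls.crt")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	// A TLS client rejects the certificate when the current time falls outside [NotBefore, NotAfter],
	// which is exactly the comparison reported in the kubelet errors above.
	now := time.Now()
	fmt.Printf("NotBefore=%s NotAfter=%s valid-now=%t\n",
		cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339),
		now.After(cert.NotBefore) && now.Before(cert.NotAfter))
}

Until that certificate is rotated or the node clock corrected, no status patch from this kubelet can pass the webhook, which is why the identical error repeats for every pod on the node.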
Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.151179 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/043db566-ff59-4a73-845e-b36a6c80d5a9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.151204 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/043db566-ff59-4a73-845e-b36a6c80d5a9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.152002 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/043db566-ff59-4a73-845e-b36a6c80d5a9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.152064 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/043db566-ff59-4a73-845e-b36a6c80d5a9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.156216 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/043db566-ff59-4a73-845e-b36a6c80d5a9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.164527 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sbwf\" (UniqueName: \"kubernetes.io/projected/043db566-ff59-4a73-845e-b36a6c80d5a9-kube-api-access-9sbwf\") pod \"ovnkube-control-plane-749d76644c-2wgph\" (UID: \"043db566-ff59-4a73-845e-b36a6c80d5a9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.229761 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.229795 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.229805 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.229818 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.229829 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.310528 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" Dec 05 05:27:14 crc kubenswrapper[4652]: W1205 05:27:14.319976 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod043db566_ff59_4a73_845e_b36a6c80d5a9.slice/crio-ca16bf3aef0847e4977f16c3fad3ed1b02334e79fa1aacd471cc85b4f8763048 WatchSource:0}: Error finding container ca16bf3aef0847e4977f16c3fad3ed1b02334e79fa1aacd471cc85b4f8763048: Status 404 returned error can't find the container with id ca16bf3aef0847e4977f16c3fad3ed1b02334e79fa1aacd471cc85b4f8763048 Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.331449 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.331477 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.331495 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.331508 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.331518 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.345797 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" event={"ID":"043db566-ff59-4a73-845e-b36a6c80d5a9","Type":"ContainerStarted","Data":"ca16bf3aef0847e4977f16c3fad3ed1b02334e79fa1aacd471cc85b4f8763048"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.433058 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.433094 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.433104 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.433129 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.433141 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.535329 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.535361 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.535369 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.535383 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.535394 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.637170 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.637216 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.637228 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.637244 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.637258 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.738744 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.738783 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.738792 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.738809 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.738820 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.840669 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.840702 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.840711 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.840725 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.840734 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.942709 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.942753 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.942762 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.942776 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:14 crc kubenswrapper[4652]: I1205 05:27:14.942785 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:14Z","lastTransitionTime":"2025-12-05T05:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.044758 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.044795 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.044807 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.044822 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.044832 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.125136 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.125151 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.125159 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:15 crc kubenswrapper[4652]: E1205 05:27:15.125244 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:15 crc kubenswrapper[4652]: E1205 05:27:15.125331 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:15 crc kubenswrapper[4652]: E1205 05:27:15.125410 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.147007 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.147038 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.147047 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.147057 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.147066 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.249090 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.249114 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.249135 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.249147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.249157 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.349878 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" event={"ID":"043db566-ff59-4a73-845e-b36a6c80d5a9","Type":"ContainerStarted","Data":"1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.349911 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" event={"ID":"043db566-ff59-4a73-845e-b36a6c80d5a9","Type":"ContainerStarted","Data":"aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.350246 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.350270 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.350280 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.350293 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.350302 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.358322 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.369801 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.378980 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.387270 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.399948 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:11Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb\\\\nI1205 05:27:11.023044 6137 services_controller.go:451] Built service openshift-operator-lifecycle-manager/packageserver-service cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.153\\\\\\\", Port:5443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 05:27:11.022984 6137 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1205 05:27:11.023065 6137 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-tfrqf after 0 failed attempt(s)\\\\nI1205 05:27:11.023081 6137 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-tfrqf\\\\nI1205 05:27:11.023009 6137 obj_retry.go:386] Retry su\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.408027 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.416065 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.423507 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.430513 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.438789 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.447160 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.451752 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.451779 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.451789 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.451802 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.451811 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.456830 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:
27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.463131 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.472692 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2775
3fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.481175 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.553721 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.553754 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.553763 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.553776 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.553787 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.655941 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.655991 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.656003 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.656016 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.656026 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.750780 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-vjg6c"] Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.751243 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:15 crc kubenswrapper[4652]: E1205 05:27:15.751308 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.757480 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.757506 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.757519 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.757529 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.757536 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.760471 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.767920 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.780315 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:11Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb\\\\nI1205 05:27:11.023044 6137 services_controller.go:451] Built service openshift-operator-lifecycle-manager/packageserver-service cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.153\\\\\\\", Port:5443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 05:27:11.022984 6137 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1205 05:27:11.023065 6137 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-tfrqf after 0 failed attempt(s)\\\\nI1205 05:27:11.023081 6137 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-tfrqf\\\\nI1205 05:27:11.023009 6137 obj_retry.go:386] Retry su\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.789393 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.797377 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.830056 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.842053 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.852352 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.859883 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.859977 4652 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.860037 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.860103 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.860168 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.863487 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.867297 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.867351 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shztp\" (UniqueName: \"kubernetes.io/projected/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-kube-api-access-shztp\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.871789 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.881725 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.888689 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.897978 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.905606 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.912607 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.920915 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:15Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.962355 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.962452 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.962522 4652 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.962604 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.962668 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:15Z","lastTransitionTime":"2025-12-05T05:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.967903 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shztp\" (UniqueName: \"kubernetes.io/projected/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-kube-api-access-shztp\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.967969 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:15 crc kubenswrapper[4652]: E1205 05:27:15.968065 4652 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:15 crc kubenswrapper[4652]: E1205 05:27:15.968128 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs podName:72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:16.46810816 +0000 UTC m=+38.704838428 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs") pod "network-metrics-daemon-vjg6c" (UID: "72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:15 crc kubenswrapper[4652]: I1205 05:27:15.981942 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shztp\" (UniqueName: \"kubernetes.io/projected/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-kube-api-access-shztp\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.064995 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.065029 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.065038 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.065057 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.065066 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.167451 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.167491 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.167503 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.167521 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.167540 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.269469 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.269520 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.269530 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.269545 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.269567 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.371436 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.371468 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.371477 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.371488 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.371498 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.473170 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:16 crc kubenswrapper[4652]: E1205 05:27:16.473318 4652 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:16 crc kubenswrapper[4652]: E1205 05:27:16.473368 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs podName:72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:17.473353861 +0000 UTC m=+39.710084138 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs") pod "network-metrics-daemon-vjg6c" (UID: "72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.473369 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.473393 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.473404 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.473417 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.473426 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.575192 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.575222 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.575232 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.575248 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.575256 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.676713 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.676736 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.676745 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.676755 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.676763 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.778734 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.778770 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.778779 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.778793 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.778802 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.880803 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.880832 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.880841 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.880852 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.880859 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.982947 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.982988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.983010 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.983021 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:16 crc kubenswrapper[4652]: I1205 05:27:16.983029 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:16Z","lastTransitionTime":"2025-12-05T05:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.084729 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.084784 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.084793 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.084804 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.084813 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.125330 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:17 crc kubenswrapper[4652]: E1205 05:27:17.125409 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.125330 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.125431 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:17 crc kubenswrapper[4652]: E1205 05:27:17.125483 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:17 crc kubenswrapper[4652]: E1205 05:27:17.125527 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.186388 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.186408 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.186415 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.186425 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.186432 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.288407 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.288445 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.288455 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.288469 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.288478 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.390211 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.390238 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.390247 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.390257 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.390264 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.480882 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:17 crc kubenswrapper[4652]: E1205 05:27:17.481004 4652 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:17 crc kubenswrapper[4652]: E1205 05:27:17.481074 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs podName:72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:19.481058044 +0000 UTC m=+41.717788301 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs") pod "network-metrics-daemon-vjg6c" (UID: "72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.491890 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.491948 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.491958 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.491969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.491976 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.594079 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.594131 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.594143 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.594157 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.594174 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.695805 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.695825 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.695833 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.695841 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.695848 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.797764 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.797814 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.797825 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.797837 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.797844 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.899931 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.899952 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.899960 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.899970 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:17 crc kubenswrapper[4652]: I1205 05:27:17.899978 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:17Z","lastTransitionTime":"2025-12-05T05:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.001942 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.001989 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.002000 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.002012 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.002021 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.103046 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.103073 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.103081 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.103091 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.103098 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.124641 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:18 crc kubenswrapper[4652]: E1205 05:27:18.124758 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.135274 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.143832 4652 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.154792 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.163690 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.171328 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.184103 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:11Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb\\\\nI1205 05:27:11.023044 6137 services_controller.go:451] Built service openshift-operator-lifecycle-manager/packageserver-service cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.153\\\\\\\", Port:5443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 05:27:11.022984 6137 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1205 05:27:11.023065 6137 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-tfrqf after 0 failed attempt(s)\\\\nI1205 05:27:11.023081 6137 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-tfrqf\\\\nI1205 05:27:11.023009 6137 obj_retry.go:386] Retry su\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.192685 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.201038 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.204476 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.204504 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.204513 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.204541 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.204550 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.208957 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.215954 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.222815 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.231656 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.239454 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.248128 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.258173 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.265648 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.306572 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.306597 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.306605 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.306617 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.306626 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.408236 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.408267 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.408279 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.408294 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.408304 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.509955 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.509988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.509999 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.510011 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.510020 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.570257 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.570282 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.570292 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.570304 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.570311 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: E1205 05:27:18.579808 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.582430 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.582458 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.582470 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.582480 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.582487 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: E1205 05:27:18.591005 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.593357 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.593387 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.593397 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.593408 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.593416 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: E1205 05:27:18.601724 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.604071 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.604100 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.604110 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.604138 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.604149 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: E1205 05:27:18.615272 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.617598 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.617640 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.617650 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.617663 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.617671 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: E1205 05:27:18.625809 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:18 crc kubenswrapper[4652]: E1205 05:27:18.625917 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.626981 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.627009 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.627018 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.627045 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.627053 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.729083 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.729139 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.729150 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.729162 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.729169 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.831043 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.831173 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.831244 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.831311 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.831373 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.933033 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.933166 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.933246 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.933342 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:18 crc kubenswrapper[4652]: I1205 05:27:18.933407 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:18Z","lastTransitionTime":"2025-12-05T05:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.034943 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.035055 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.035213 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.035281 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.035364 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.124921 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:19 crc kubenswrapper[4652]: E1205 05:27:19.124998 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.125046 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:19 crc kubenswrapper[4652]: E1205 05:27:19.125149 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.125206 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:19 crc kubenswrapper[4652]: E1205 05:27:19.125255 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.136575 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.136620 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.136630 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.136641 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.136648 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.238894 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.238938 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.238948 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.238960 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.238969 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.340683 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.340711 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.340720 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.340733 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.340742 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.442220 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.442253 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.442262 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.442274 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.442281 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.495920 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:19 crc kubenswrapper[4652]: E1205 05:27:19.496043 4652 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:19 crc kubenswrapper[4652]: E1205 05:27:19.496094 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs podName:72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:23.496079822 +0000 UTC m=+45.732810089 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs") pod "network-metrics-daemon-vjg6c" (UID: "72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.543625 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.543654 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.543663 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.543676 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.543684 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.645205 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.645229 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.645238 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.645248 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.645256 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.747172 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.747201 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.747209 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.747221 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.747228 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.849230 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.849255 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.849263 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.849276 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.849284 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.951596 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.951619 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.951628 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.951641 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:19 crc kubenswrapper[4652]: I1205 05:27:19.951648 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:19Z","lastTransitionTime":"2025-12-05T05:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.053513 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.053547 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.053578 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.053590 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.053599 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.125501 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:20 crc kubenswrapper[4652]: E1205 05:27:20.125638 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.155717 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.155747 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.155756 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.155783 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.155791 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.257493 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.257522 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.257532 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.257543 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.257550 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.359895 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.359925 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.359933 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.359944 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.359952 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.461726 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.461752 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.461759 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.461787 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.461799 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.563802 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.563841 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.563850 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.563864 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.563875 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.665828 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.665883 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.665894 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.665906 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.665914 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.768228 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.768261 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.768272 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.768284 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.768292 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.869841 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.869876 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.869886 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.869900 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.869909 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.971720 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.971768 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.971782 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.971799 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:20 crc kubenswrapper[4652]: I1205 05:27:20.971814 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:20Z","lastTransitionTime":"2025-12-05T05:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.073164 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.073198 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.073206 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.073218 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.073228 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.124914 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.124930 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.124994 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:21 crc kubenswrapper[4652]: E1205 05:27:21.125107 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:21 crc kubenswrapper[4652]: E1205 05:27:21.125260 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:21 crc kubenswrapper[4652]: E1205 05:27:21.125361 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.175086 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.175147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.175158 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.175171 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.175180 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.277155 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.277186 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.277195 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.277205 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.277212 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.379048 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.379071 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.379078 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.379088 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.379096 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.480976 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.480998 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.481005 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.481021 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.481030 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.583128 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.583177 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.583185 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.583196 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.583206 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.684930 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.684956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.684964 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.684974 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.684982 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.786702 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.786737 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.786746 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.786761 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.786769 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.888435 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.888471 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.888479 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.888494 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.888506 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.990649 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.990691 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.990702 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.990720 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:21 crc kubenswrapper[4652]: I1205 05:27:21.990729 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:21Z","lastTransitionTime":"2025-12-05T05:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.092322 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.092367 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.092378 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.092388 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.092395 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.125811 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:22 crc kubenswrapper[4652]: E1205 05:27:22.126612 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.193793 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.193823 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.193833 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.193844 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.193854 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.295622 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.295652 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.295660 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.295670 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.295677 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.397812 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.397846 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.397855 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.397869 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.397877 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.500654 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.500686 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.500695 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.500705 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.500713 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.602770 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.602797 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.602805 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.602817 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.602827 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.704969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.704994 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.705002 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.705012 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.705031 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.806880 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.806940 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.806951 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.806961 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.806969 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.909038 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.909079 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.909087 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.909103 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:22 crc kubenswrapper[4652]: I1205 05:27:22.909129 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:22Z","lastTransitionTime":"2025-12-05T05:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.013340 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.013570 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.013581 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.013594 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.013603 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.115718 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.115766 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.115777 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.115792 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.115802 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.124921 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.124952 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:23 crc kubenswrapper[4652]: E1205 05:27:23.125012 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:23 crc kubenswrapper[4652]: E1205 05:27:23.125079 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.125183 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:23 crc kubenswrapper[4652]: E1205 05:27:23.125305 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.217050 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.217072 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.217081 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.217091 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.217099 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.318895 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.318936 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.318947 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.318960 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.318968 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.420884 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.420913 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.420922 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.420933 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.420941 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.523260 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.523298 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.523306 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.523319 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.523327 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.527534 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:23 crc kubenswrapper[4652]: E1205 05:27:23.527670 4652 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:23 crc kubenswrapper[4652]: E1205 05:27:23.527728 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs podName:72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:31.527715323 +0000 UTC m=+53.764445591 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs") pod "network-metrics-daemon-vjg6c" (UID: "72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.624708 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.625063 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.625171 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.625239 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.625303 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.726503 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.726532 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.726541 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.726571 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.726593 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.828234 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.828267 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.828277 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.828290 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.828298 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.930023 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.930055 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.930064 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.930075 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:23 crc kubenswrapper[4652]: I1205 05:27:23.930084 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:23Z","lastTransitionTime":"2025-12-05T05:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.031887 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.031926 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.031936 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.031949 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.031958 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.124871 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:24 crc kubenswrapper[4652]: E1205 05:27:24.124968 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.133661 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.133690 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.133698 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.133711 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.133718 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.235665 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.235696 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.235705 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.235720 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.235728 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.337630 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.337660 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.337669 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.337679 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.337688 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.439138 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.439165 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.439175 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.439185 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.439192 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.541632 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.541681 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.541691 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.541701 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.541707 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.643183 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.643212 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.643221 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.643238 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.643247 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.744724 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.744758 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.744771 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.744783 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.744792 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.804280 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.804902 4652 scope.go:117] "RemoveContainer" containerID="f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.846178 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.846208 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.846217 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.846230 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.846239 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.947784 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.947811 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.947821 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.947832 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:24 crc kubenswrapper[4652]: I1205 05:27:24.947840 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:24Z","lastTransitionTime":"2025-12-05T05:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.049740 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.049771 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.049779 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.049791 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.049800 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.125621 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.125661 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.125687 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:25 crc kubenswrapper[4652]: E1205 05:27:25.125727 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:25 crc kubenswrapper[4652]: E1205 05:27:25.125851 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:25 crc kubenswrapper[4652]: E1205 05:27:25.125909 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.152050 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.152077 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.152086 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.152097 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.152105 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.253986 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.254019 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.254146 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.254173 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.254183 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.356403 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.356432 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.356441 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.356453 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.356461 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.373700 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/1.log" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.375743 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.376139 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.387546 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.396741 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.405974 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.414204 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.421204 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.431997 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.440273 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.448566 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.458268 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.458228 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.458295 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.458413 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: 
I1205 05:27:25.458430 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.458439 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.465065 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.473213 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.481174 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.488055 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.496549 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.503998 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.516240 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:11Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb\\\\nI1205 05:27:11.023044 6137 services_controller.go:451] Built service openshift-operator-lifecycle-manager/packageserver-service cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.153\\\\\\\", Port:5443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 05:27:11.022984 6137 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1205 05:27:11.023065 6137 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-tfrqf after 0 failed attempt(s)\\\\nI1205 05:27:11.023081 6137 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-tfrqf\\\\nI1205 05:27:11.023009 6137 obj_retry.go:386] Retry 
su\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.560634 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.560678 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.560688 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.560701 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.560709 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.662521 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.662547 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.662572 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.662586 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.662596 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.764036 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.764070 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.764082 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.764095 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.764125 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.865772 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.865803 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.865813 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.865826 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.865837 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.968016 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.968048 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.968058 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.968069 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:25 crc kubenswrapper[4652]: I1205 05:27:25.968077 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:25Z","lastTransitionTime":"2025-12-05T05:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.070107 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.070144 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.070154 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.070166 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.070173 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.125261 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.125429 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.171484 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.171510 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.171518 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.171528 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.171535 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.273602 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.273632 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.273640 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.273652 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.273666 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.375585 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.375628 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.375636 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.375649 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.375656 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.378744 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/2.log" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.379198 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/1.log" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.381235 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d" exitCode=1 Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.381259 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.381295 4652 scope.go:117] "RemoveContainer" containerID="f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.381710 4652 scope.go:117] "RemoveContainer" containerID="62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d" Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.381846 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.392034 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
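[editor's note] Every "Failed to update status for pod" record that follows fails for the same underlying reason: the network-node-identity webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is months before the node's current clock, so Go's TLS verifier rejects the chain before any POST is delivered. A minimal sketch of that x509 validity-window check is below; the NotBefore value is hypothetical (only NotAfter appears in the log), and this reproduces the error wording rather than the verifier's full chain logic:

// certwindow.go: minimal sketch of the validity check behind
// "x509: certificate has expired or is not yet valid" under the assumptions above.
package main

import (
	"crypto/x509"
	"fmt"
	"time"
)

// checkValidityWindow mirrors the NotBefore/NotAfter comparison made during
// chain verification (the x509.Expired case of x509.CertificateInvalidError).
func checkValidityWindow(cert *x509.Certificate, now time.Time) error {
	if now.Before(cert.NotBefore) {
		return fmt.Errorf("certificate is not yet valid: current time %s is before %s",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	}
	if now.After(cert.NotAfter) {
		return fmt.Errorf("certificate has expired: current time %s is after %s",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
	return nil
}

func main() {
	// NotAfter is taken from the log; NotBefore is a hypothetical issue date.
	cert := &x509.Certificate{
		NotBefore: time.Date(2025, 2, 24, 17, 21, 41, 0, time.UTC),
		NotAfter:  time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC),
	}
	now := time.Date(2025, 12, 5, 5, 27, 26, 0, time.UTC) // node clock at this point in the log
	if err := checkValidityWindow(cert, now); err != nil {
		fmt.Println("tls: failed to verify certificate: x509:", err)
	}
}

Because the clock skew here is on the server certificate rather than the client, every status patch below is rejected identically until the webhook's certificate is rotated.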
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.402816 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.415590 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6b895e25696165808dec07aaf3163a46d9f2287d5ce626fd2a981b872704e57\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:11Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb\\\\nI1205 05:27:11.023044 6137 services_controller.go:451] Built service openshift-operator-lifecycle-manager/packageserver-service cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/packageserver-service\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.153\\\\\\\", Port:5443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 05:27:11.022984 6137 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1205 05:27:11.023065 6137 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-tfrqf after 0 failed attempt(s)\\\\nI1205 05:27:11.023081 6137 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-tfrqf\\\\nI1205 05:27:11.023009 6137 obj_retry.go:386] Retry 
su\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 
obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f3
6cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.423794 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.430826 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.439486 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.447158 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.454770 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.464064 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.470762 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.477202 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.477235 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.477245 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.477258 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.477268 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.479588 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.487431 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.495837 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.503704 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.511891 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.519054 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.579225 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.579253 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.579262 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.579275 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.579283 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.681293 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.681325 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.681335 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.681346 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.681355 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.782579 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.782612 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.782621 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.782635 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.782645 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.854137 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.854202 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.854231 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.854300 4652 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.854333 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:58.854323723 +0000 UTC m=+81.091053980 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.854391 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:58.854367314 +0000 UTC m=+81.091097602 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.854425 4652 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.854485 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:58.854471762 +0000 UTC m=+81.091202028 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.884186 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.884208 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.884217 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.884228 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.884256 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.955002 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.955050 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.955152 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.955172 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.955184 4652 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.955210 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.955231 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.955243 4652 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.955219 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:58.955210861 +0000 UTC m=+81.191941128 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:26 crc kubenswrapper[4652]: E1205 05:27:26.955287 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:27:58.955279259 +0000 UTC m=+81.192009526 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.985861 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.985891 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.985901 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.985912 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:26 crc kubenswrapper[4652]: I1205 05:27:26.985920 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:26Z","lastTransitionTime":"2025-12-05T05:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.088230 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.088270 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.088281 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.088297 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.088309 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.125027 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.125061 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:27 crc kubenswrapper[4652]: E1205 05:27:27.125130 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.125181 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:27 crc kubenswrapper[4652]: E1205 05:27:27.125229 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:27 crc kubenswrapper[4652]: E1205 05:27:27.125289 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.190118 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.190149 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.190161 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.190172 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.190183 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.292090 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.292153 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.292170 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.292184 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.292192 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.386007 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/2.log" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.388614 4652 scope.go:117] "RemoveContainer" containerID="62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d" Dec 05 05:27:27 crc kubenswrapper[4652]: E1205 05:27:27.388733 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.394209 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.394242 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.394252 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.394265 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.394273 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.398182 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.406585 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.414067 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.421334 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:
14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.430405 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.439679 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernete
s/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.447975 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.455783 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.468107 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.476999 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.486192 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.494116 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.496118 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.496147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.496157 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.496170 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.496179 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.503575 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.512469 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.520063 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.532517 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.598051 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.598080 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.598091 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.598104 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.598122 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.699893 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.699944 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.699956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.699970 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.699982 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.801062 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.801093 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.801104 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.801132 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.801143 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.902875 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.902917 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.902929 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.902956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:27 crc kubenswrapper[4652]: I1205 05:27:27.902965 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:27Z","lastTransitionTime":"2025-12-05T05:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.004823 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.004853 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.004863 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.004874 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.004886 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.106892 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.106919 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.106929 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.106941 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.106951 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.125496 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:28 crc kubenswrapper[4652]: E1205 05:27:28.125613 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.135319 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.145700 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\
\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.152999 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.162253 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.171195 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.178388 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.186906 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.199837 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.207999 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.208120 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.208142 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.208150 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.208162 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.208170 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.219777 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378
a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.227313 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.235353 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.242644 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.249935 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.256577 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.264769 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.310012 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.310066 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.310077 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.310091 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.310100 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.411145 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.411174 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.411182 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.411192 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.411201 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.512419 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.512470 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.512484 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.512506 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.512522 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.613691 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.613746 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.613757 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.613770 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.613780 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.715748 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.715777 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.715786 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.715796 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.715805 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.817800 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.817847 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.817862 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.817881 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.817892 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.919765 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.919804 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.919816 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.919830 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.919841 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.940938 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.940980 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.940991 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.941006 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.941018 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: E1205 05:27:28.949805 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.952030 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.952053 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.952061 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.952074 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.952100 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: E1205 05:27:28.960404 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.962698 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.962729 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.962740 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.962750 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.962758 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: E1205 05:27:28.970839 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.972875 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.972903 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.972912 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.972926 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.972936 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: E1205 05:27:28.980672 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.983501 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.983521 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.983531 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.983542 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:28 crc kubenswrapper[4652]: I1205 05:27:28.983566 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:28Z","lastTransitionTime":"2025-12-05T05:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:28 crc kubenswrapper[4652]: E1205 05:27:28.994895 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:28 crc kubenswrapper[4652]: E1205 05:27:28.995000 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.020993 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.021022 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.021029 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.021039 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.021049 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.123329 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.123358 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.123388 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.123400 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.123408 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.124645 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.124681 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:29 crc kubenswrapper[4652]: E1205 05:27:29.124735 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.124656 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:29 crc kubenswrapper[4652]: E1205 05:27:29.124796 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:29 crc kubenswrapper[4652]: E1205 05:27:29.124893 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.224912 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.224933 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.224941 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.224951 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.224958 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.326679 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.326712 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.326722 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.326732 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.326740 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.428214 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.428247 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.428256 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.428270 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.428280 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.529987 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.530023 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.530036 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.530049 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.530058 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.631425 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.631465 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.631479 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.631496 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.631508 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.733610 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.733638 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.733646 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.733656 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.733663 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.835741 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.835793 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.835810 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.835830 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.835843 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.937709 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.937732 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.937740 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.937750 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:29 crc kubenswrapper[4652]: I1205 05:27:29.937761 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
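[editor's note] Every error in this stretch traces to one condition: kubelet finds no CNI configuration file in /etc/kubernetes/cni/net.d/, so the node stays NotReady and no pod-network sandbox can be created until the network provider writes a config there. For orientation only, a minimal CNI conflist of the general shape kubelet's network plugin looks for; the name, bridge plugin, and subnet below are illustrative placeholders, not what this OpenShift/CRC node (whose config is managed by its cluster network operator) would actually contain:

    {
      "cniVersion": "0.4.0",
      "name": "example-net",
      "plugins": [
        {
          "type": "bridge",
          "bridge": "cni0",
          "ipam": { "type": "host-local", "subnet": "10.88.0.0/16" }
        }
      ]
    }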
Dec 05 05:27:30 crc kubenswrapper[4652]: I1205 05:27:30.125426 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c"
Dec 05 05:27:30 crc kubenswrapper[4652]: E1205 05:27:30.125595 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b"
[... the same "No sandbox for pod can be found" / "Error syncing pod, skipping" pairs recur for these four pods on each ~2 s resync through 05:27:33.125 ...]
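[editor's note] The condition blob that setters.go:603 logs above is plain JSON and decodes into a small fixed shape. A stdlib-only Go sketch that mirrors it, assuming nothing beyond the keys visible in this log (the struct is a local stand-in, not the kubelet's own type):

    // mirror of the "Node became not ready" condition payload seen above
    package main

    import (
        "encoding/json"
        "fmt"
    )

    type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:29Z","lastTransitionTime":"2025-12-05T05:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
        var c nodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        fmt.Printf("Ready=%s since %s (%s)\n", c.Status, c.LastTransitionTime, c.Reason)
    }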
Dec 05 05:27:31 crc kubenswrapper[4652]: I1205 05:27:31.593902 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c"
Dec 05 05:27:31 crc kubenswrapper[4652]: E1205 05:27:31.594006 4652 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 05:27:31 crc kubenswrapper[4652]: E1205 05:27:31.594053 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs podName:72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b nodeName:}" failed. No retries permitted until 2025-12-05 05:27:47.594042297 +0000 UTC m=+69.830772564 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs") pod "network-metrics-daemon-vjg6c" (UID: "72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b") : object "openshift-multus"/"metrics-daemon-secret" not registered
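[editor's note] The nestedpendingoperations record above shows the retry policy at work: the failed MountVolume.SetUp is not retried immediately, and the delay grows on each failure (here it has reached durationBeforeRetry 16s, with the next attempt gated until 05:27:47). A minimal Go sketch of that doubling pattern; the 2 s initial delay and 2 m cap are assumptions for illustration, not values read from this log:

    // doubling retry delay of the kind implied by "durationBeforeRetry 16s"
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 2 * time.Second         // assumed initial delay
        const maxDelay = 2 * time.Minute // assumed cap
        for attempt := 1; attempt <= 5; attempt++ {
            fmt.Printf("attempt %d failed; next retry permitted in %s\n", attempt, delay)
            delay *= 2 // double after every failure, as the growing delays suggest
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }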
Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.761824 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.770983 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.772596 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.782093 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.791238 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.800841 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.807644 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.809956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.809980 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.809988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.809999 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.810007 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:33Z","lastTransitionTime":"2025-12-05T05:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.816511 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.824162 4652 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.831441 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.839200 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.846632 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.859292 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.867928 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.875804 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.883513 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.891102 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.898421 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.911548 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.911608 4652 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.911620 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.911635 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:33 crc kubenswrapper[4652]: I1205 05:27:33.911643 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:33Z","lastTransitionTime":"2025-12-05T05:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.013437 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.013463 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.013473 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.013483 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.013492 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.114677 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.114703 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.114713 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.114740 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.114750 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.125131 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:34 crc kubenswrapper[4652]: E1205 05:27:34.125233 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.216649 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.216677 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.216685 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.216695 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.216704 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.318338 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.318383 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.318394 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.318404 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.318413 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.420574 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.420605 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.420613 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.420622 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.420630 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.522727 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.522758 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.522767 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.522780 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.522790 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.624294 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.624324 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.624352 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.624372 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.624382 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.726157 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.726187 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.726195 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.726207 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.726216 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.827576 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.827605 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.827633 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.827645 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.827653 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.929434 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.929465 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.929476 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.929487 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:34 crc kubenswrapper[4652]: I1205 05:27:34.929499 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:34Z","lastTransitionTime":"2025-12-05T05:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.032143 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.032170 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.032180 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.032189 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.032196 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.125182 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.125192 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.125223 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:35 crc kubenswrapper[4652]: E1205 05:27:35.125289 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:35 crc kubenswrapper[4652]: E1205 05:27:35.125367 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:35 crc kubenswrapper[4652]: E1205 05:27:35.125411 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.134282 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.134317 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.134326 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.134339 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.134348 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.236263 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.236292 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.236303 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.236330 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.236339 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.338257 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.338284 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.338292 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.338301 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.338309 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.440271 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.440310 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.440322 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.440334 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.440343 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.542403 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.542426 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.542435 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.542462 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.542472 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.643798 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.643833 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.643842 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.643856 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.643865 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.744931 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.744973 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.744986 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.745002 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.745014 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.846953 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.846976 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.846984 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.846993 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.847000 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.949087 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.949129 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.949140 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.949150 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:35 crc kubenswrapper[4652]: I1205 05:27:35.949158 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:35Z","lastTransitionTime":"2025-12-05T05:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.050869 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.050900 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.050907 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.050918 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.050937 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.125102 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:36 crc kubenswrapper[4652]: E1205 05:27:36.125239 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.152905 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.152934 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.152942 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.152951 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.152959 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.253978 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.254020 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.254028 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.254041 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.254050 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.356394 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.356421 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.356429 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.356440 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.356450 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.458877 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.458931 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.458942 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.458961 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.458970 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.560877 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.560913 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.560921 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.560935 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.560944 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.663056 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.663093 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.663101 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.663122 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.663130 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.764578 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.764614 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.764622 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.764632 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.764640 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.866488 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.866518 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.866526 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.866587 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.866598 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.968279 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.968318 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.968328 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.968344 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:36 crc kubenswrapper[4652]: I1205 05:27:36.968356 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:36Z","lastTransitionTime":"2025-12-05T05:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.070438 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.070460 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.070468 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.070479 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.070486 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.125127 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.125159 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:37 crc kubenswrapper[4652]: E1205 05:27:37.125215 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.125239 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:37 crc kubenswrapper[4652]: E1205 05:27:37.125270 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:37 crc kubenswrapper[4652]: E1205 05:27:37.125366 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.171904 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.171930 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.171939 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.171948 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.171956 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.274026 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.274054 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.274062 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.274073 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.274080 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.376174 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.376207 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.376216 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.376226 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.376235 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.477948 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.477980 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.477988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.477999 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.478008 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.580026 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.580063 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.580072 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.580086 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.580097 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.682600 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.682626 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.682637 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.682651 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.682662 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.784145 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.784172 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.784180 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.784190 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.784198 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.885457 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.885485 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.885495 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.885506 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.885515 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.986921 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.986949 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.986956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.986969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:37 crc kubenswrapper[4652]: I1205 05:27:37.986976 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:37Z","lastTransitionTime":"2025-12-05T05:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.088018 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.088045 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.088055 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.088066 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.088073 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.124638 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:38 crc kubenswrapper[4652]: E1205 05:27:38.124737 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.136464 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.145842 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.157893 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.164631 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.172357 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.180508 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.187856 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.189408 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.189681 4652 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.189730 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.189743 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.189909 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.197738 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"stat
e\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.205189 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.214610 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.222877 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.230260 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.238478 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.248090 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.256656 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.264307 4652 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.270978 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.291112 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.291186 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.291198 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.291210 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.291220 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.393418 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.393452 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.393461 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.393474 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.393483 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.494990 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.495024 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.495035 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.495048 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.495057 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.596916 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.597029 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.597110 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.597179 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.597244 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.698758 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.698954 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.698963 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.698975 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.698983 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.801053 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.801083 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.801092 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.801109 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.801116 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.902970 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.903359 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.903437 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.903500 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:38 crc kubenswrapper[4652]: I1205 05:27:38.903577 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:38Z","lastTransitionTime":"2025-12-05T05:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.005190 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.005226 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.005235 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.005247 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.005255 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.107548 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.107594 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.107602 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.107611 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.107619 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.125001 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.125053 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.125052 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.125313 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.125414 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.125485 4652 scope.go:117] "RemoveContainer" containerID="62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d" Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.125502 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.125660 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.209322 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.209444 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.209529 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.209631 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.209784 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.223226 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.223351 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.223455 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.223532 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.223651 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.233592 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:39Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.236202 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.236223 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.236231 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.236240 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.236248 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.244516 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:39Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.246949 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.246974 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.246981 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.246990 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.246997 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.255290 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:39Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.257523 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.257632 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.257696 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.257754 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.257809 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.265746 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:39Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.267924 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.267950 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.267958 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.267969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.267978 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.276244 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:39Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:39 crc kubenswrapper[4652]: E1205 05:27:39.276347 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.310903 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.311140 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.311215 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.311278 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.311356 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.412954 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.412999 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.413013 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.413031 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.413045 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.514109 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.514131 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.514139 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.514150 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.514158 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.920934 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.921255 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.921339 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.921409 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:39 crc kubenswrapper[4652]: I1205 05:27:39.921477 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:39Z","lastTransitionTime":"2025-12-05T05:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.023236 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.023426 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.023500 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.023583 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.023646 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:40Z","lastTransitionTime":"2025-12-05T05:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.124948 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:40 crc kubenswrapper[4652]: E1205 05:27:40.125021 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.125288 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.125305 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.125311 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.125319 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.125326 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:40Z","lastTransitionTime":"2025-12-05T05:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.226619 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.226718 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.226822 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.226875 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.226922 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:40Z","lastTransitionTime":"2025-12-05T05:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.943678 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.943732 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.943745 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.943762 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:40 crc kubenswrapper[4652]: I1205 05:27:40.943771 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:40Z","lastTransitionTime":"2025-12-05T05:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.045499 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.045634 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.045697 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.045771 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.045826 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:41Z","lastTransitionTime":"2025-12-05T05:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.124911 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.124935 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:41 crc kubenswrapper[4652]: E1205 05:27:41.125004 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.124914 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:41 crc kubenswrapper[4652]: E1205 05:27:41.125178 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:41 crc kubenswrapper[4652]: E1205 05:27:41.125329 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.147785 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.147817 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.147826 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.147840 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.147850 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:41Z","lastTransitionTime":"2025-12-05T05:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.249992 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.250043 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.250054 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.250071 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.250082 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:41Z","lastTransitionTime":"2025-12-05T05:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.963536 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.963579 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.963590 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.963600 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:41 crc kubenswrapper[4652]: I1205 05:27:41.963608 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:41Z","lastTransitionTime":"2025-12-05T05:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.065132 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.065172 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.065180 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.065195 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.065203 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:42Z","lastTransitionTime":"2025-12-05T05:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.125316 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:42 crc kubenswrapper[4652]: E1205 05:27:42.125401 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.169871 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.169901 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.169912 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.170170 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.170194 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:42Z","lastTransitionTime":"2025-12-05T05:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.272057 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.272084 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.272094 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.272115 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.272125 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:42Z","lastTransitionTime":"2025-12-05T05:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.985245 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.985272 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.985282 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.985293 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:42 crc kubenswrapper[4652]: I1205 05:27:42.985302 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:42Z","lastTransitionTime":"2025-12-05T05:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.087080 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.087134 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.087147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.087162 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.087172 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:43Z","lastTransitionTime":"2025-12-05T05:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.124833 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.124866 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.124871 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:43 crc kubenswrapper[4652]: E1205 05:27:43.124933 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:43 crc kubenswrapper[4652]: E1205 05:27:43.125002 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:43 crc kubenswrapper[4652]: E1205 05:27:43.125073 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.188969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.189004 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.189018 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.189032 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.189043 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:43Z","lastTransitionTime":"2025-12-05T05:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.291799 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.291830 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.291839 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.291849 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:43 crc kubenswrapper[4652]: I1205 05:27:43.291859 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:43Z","lastTransitionTime":"2025-12-05T05:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.005861 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.005896 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.005907 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.005920 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.005938 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.108243 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.108272 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.108283 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.108295 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.108304 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.124709 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:44 crc kubenswrapper[4652]: E1205 05:27:44.124806 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.210312 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.210345 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.210353 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.210365 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.210373 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.311693 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.311721 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.311730 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.311740 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.311747 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.414116 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.414174 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.414186 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.414213 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.414227 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.515979 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.516030 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.516041 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.516259 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.516274 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.618404 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.618669 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.618684 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.618702 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.618713 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.720438 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.720472 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.720480 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.720494 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.720505 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.822418 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.822451 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.822461 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.822475 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.822486 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.924540 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.924600 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.924609 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.924623 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:44 crc kubenswrapper[4652]: I1205 05:27:44.924631 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:44Z","lastTransitionTime":"2025-12-05T05:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.026410 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.026445 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.026455 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.026469 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.026477 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.125365 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.125399 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.125412 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:45 crc kubenswrapper[4652]: E1205 05:27:45.125490 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:45 crc kubenswrapper[4652]: E1205 05:27:45.125659 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:45 crc kubenswrapper[4652]: E1205 05:27:45.125842 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.128510 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.128550 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.128586 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.128603 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.128617 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.135590 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.230552 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.230612 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.230621 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.230634 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.230642 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.332675 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.332715 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.332723 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.332739 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.332749 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.434464 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.434534 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.434544 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.434572 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.434591 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.535979 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.536006 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.536014 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.536025 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.536032 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.638126 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.638150 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.638158 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.638167 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.638175 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.739802 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.739831 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.739841 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.739854 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.739864 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.841847 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.841880 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.841893 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.841909 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.841920 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.943093 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.943141 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.943151 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.943165 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:45 crc kubenswrapper[4652]: I1205 05:27:45.943177 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:45Z","lastTransitionTime":"2025-12-05T05:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.044596 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.044624 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.044635 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.044649 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.044658 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.125465 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:46 crc kubenswrapper[4652]: E1205 05:27:46.125574 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.146630 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.146656 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.146665 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.146676 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.146684 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.248299 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.248315 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.248323 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.248332 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.248339 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.350368 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.350393 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.350404 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.350417 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.350426 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.452025 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.452080 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.452117 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.452131 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.452143 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.554792 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.554861 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.554872 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.554895 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.554906 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.656895 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.656942 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.656953 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.656969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.656981 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.759108 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.759148 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.759158 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.759175 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.759188 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.862921 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.862964 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.862975 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.862995 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.863006 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.965045 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.965089 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.965108 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.965123 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:46 crc kubenswrapper[4652]: I1205 05:27:46.965132 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:46Z","lastTransitionTime":"2025-12-05T05:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.067015 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.067053 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.067063 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.067077 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.067088 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.124887 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.124919 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.124941 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:47 crc kubenswrapper[4652]: E1205 05:27:47.125017 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:47 crc kubenswrapper[4652]: E1205 05:27:47.125117 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:47 crc kubenswrapper[4652]: E1205 05:27:47.125177 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.170434 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.170490 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.170503 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.170526 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.170540 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.272490 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.272535 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.272545 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.272572 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.272583 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.374131 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.374156 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.374165 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.374179 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.374190 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.475856 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.475956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.476127 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.476203 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.476266 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.577695 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.577714 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.577721 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.577729 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.577735 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.629084 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:47 crc kubenswrapper[4652]: E1205 05:27:47.629221 4652 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:47 crc kubenswrapper[4652]: E1205 05:27:47.629266 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs podName:72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b nodeName:}" failed. No retries permitted until 2025-12-05 05:28:19.629249898 +0000 UTC m=+101.865980175 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs") pod "network-metrics-daemon-vjg6c" (UID: "72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.679793 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.679814 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.679822 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.679832 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.679839 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.781145 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.781252 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.781344 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.781424 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.781481 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.882718 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.882844 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.882917 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.882983 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.883048 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.985256 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.985295 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.985304 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.985317 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:47 crc kubenswrapper[4652]: I1205 05:27:47.985325 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:47Z","lastTransitionTime":"2025-12-05T05:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.086545 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.086937 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.087023 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.087111 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.087181 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.125094 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:48 crc kubenswrapper[4652]: E1205 05:27:48.125216 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.136996 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.146437 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.153060 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.160824 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.167866 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.180148 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.187868 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416
081171ee895c5e76f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.188864 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.188891 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.188900 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.188912 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.188923 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.198660 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.206941 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.214608 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.221861 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:
14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.236496 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.265994 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernete
s/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.279628 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.289933 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.290816 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.290844 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.290855 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.290869 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 
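
Every "Failed to update status for pod" entry in this stretch carries the same root cause in its tail: the kubelet's PATCH is routed through the pod.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743/pod, and that webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-05T05:27:48Z, so every TLS handshake aborts before the status update can land. The quoted error text is Go's standard x509 validity failure; the minimal sketch below (not kubelet code; the certificate path is hypothetical) reproduces the same NotBefore/NotAfter comparison that produces it:

// expirycheck.go - minimal sketch of the x509 validity comparison reported
// in the webhook errors above; illustrative only, the file path is hypothetical.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("/path/to/webhook-serving-cert.pem") // hypothetical path
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	// Same window check the TLS handshake performs before reporting
	// "certificate has expired or is not yet valid".
	switch {
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}
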
05:27:48.290880 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.300117 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\
\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.311478 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminate
d\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826
c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.320492 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.392805 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.392843 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.392854 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.392867 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.392876 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.494426 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.494458 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.494467 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.494481 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.494491 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.596536 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.596591 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.596603 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.596615 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.596627 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.698586 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.698613 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.698623 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.698633 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.698641 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.800161 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.800201 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.800212 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.800227 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.800238 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.901632 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.901662 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.901671 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.901682 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:48 crc kubenswrapper[4652]: I1205 05:27:48.901691 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:48Z","lastTransitionTime":"2025-12-05T05:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.003698 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.003734 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.003742 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.003756 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.003766 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.105493 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.105532 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.105546 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.105571 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.105580 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.125537 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.125543 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.125576 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.125648 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
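
The "No sandbox for pod can be found" and "Error syncing pod, skipping" entries follow directly from the NodeNotReady condition repeated above: the kubelet keeps reporting NetworkReady=false until a CNI network configuration appears in /etc/kubernetes/cni/net.d/, and pods that need a pod-network sandbox are skipped until then. Below is an illustrative Go sketch of such a directory probe, assuming the conventional CNI config extensions (.conf, .conflist, .json); the authoritative check is performed by the container runtime, not by this snippet:

// cnicheck.go - illustrative approximation of the "no CNI configuration
// file" readiness test named in the NodeNotReady condition above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: %v\n", err)
		return
	}
	for _, e := range entries {
		// Extensions CNI config loaders conventionally accept
		// (assumption; consult libcni for the authoritative list).
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Printf("NetworkReady=true: found %s\n", e.Name())
			return
		}
	}
	fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", confDir)
}

In this log the multus pods are already Running, so the condition is expected to clear once the network provider finishes writing its config into that directory.
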
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.125734 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.125888 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.206972 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.207005 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.207014 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.207023 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.207031 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.308650 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.308683 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.308692 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.308702 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.308710 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.409767 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.409795 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.409805 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.409816 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.409822 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.419321 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.421988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.422011 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
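
The node-status update that just failed above is a strategic merge patch: the $setElementOrder/conditions directive pins the ordering of the conditions list by its "type" merge key, and each condition carries only the fields being changed, alongside the full allocatable/capacity and image snapshot. A short Go sketch of just the shape of that patch body (illustrative only, not the client-go machinery the kubelet actually uses):

// patchshape.go - reproduces only the *shape* of the status patch seen in
// the "failed to patch status" entries above.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	patch := map[string]any{
		"status": map[string]any{
			// Strategic-merge-patch directive: pins the merge order of
			// the conditions list by its "type" merge key, as in the log.
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			// Only the changed fields are sent per condition.
			"conditions": []map[string]string{
				{"type": "Ready", "status": "False", "reason": "KubeletNotReady"},
			},
		},
	}
	body, err := json.MarshalIndent(patch, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}
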
event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.422019 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.422031 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.422037 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.430364 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.432868 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.432897 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.432907 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.432921 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.432930 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.442457 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/0.log" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.442491 4652 generic.go:334] "Generic (PLEG): container finished" podID="57ea6288-d271-498d-ad7e-aa90f3d433e4" containerID="33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3" exitCode=1 Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.442516 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-569vn" event={"ID":"57ea6288-d271-498d-ad7e-aa90f3d433e4","Type":"ContainerDied","Data":"33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.442792 4652 scope.go:117] "RemoveContainer" containerID="33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3" Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.443482 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4
a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.445905 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.445985 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.446048 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.446135 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.446196 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.452849 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.455008 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.462617 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.462725 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.462734 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.462747 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.462756 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.465446 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.472402 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: E1205 05:27:49.472519 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.473840 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.473875 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.473886 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.473899 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.473908 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.479240 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378
a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.489969 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416
081171ee895c5e76f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.499761 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.508260 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.517451 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.526354 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.535806 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.547051 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.558071 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.567266 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.576212 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.576242 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.576252 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.576266 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 
05:27:49.576274 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.576265 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:48Z\\\",\\\"message\\\":\\\"2025-12-05T05:27:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736\\\\n2025-12-05T05:27:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736 to /host/opt/cni/bin/\\\\n2025-12-05T05:27:03Z [verbose] multus-daemon started\\\\n2025-12-05T05:27:03Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:27:48Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.586732 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.593182 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.601174 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.610565 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.617591 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:49Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.678947 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.678982 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.678992 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.679007 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.679018 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.780875 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.780910 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.780921 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.780932 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.780942 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.882742 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.882781 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.882791 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.882807 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.882819 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.984757 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.984783 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.984792 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.984803 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:49 crc kubenswrapper[4652]: I1205 05:27:49.984811 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:49Z","lastTransitionTime":"2025-12-05T05:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.086106 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.086130 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.086138 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.086148 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.086160 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.125061 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:50 crc kubenswrapper[4652]: E1205 05:27:50.125185 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.187773 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.187803 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.187813 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.187823 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.187832 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.289211 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.289246 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.289255 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.289267 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.289275 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.390981 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.391003 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.391012 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.391023 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.391030 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.448319 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/0.log" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.448362 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-569vn" event={"ID":"57ea6288-d271-498d-ad7e-aa90f3d433e4","Type":"ContainerStarted","Data":"282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.458363 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.466251 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.474323 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.482839 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.489854 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.492172 4652 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.492195 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.492204 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.492214 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.492221 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.502332 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378
a4d516261a31676ed23bc90d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.510746 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.519068 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.530425 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.537775 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.544441 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.551108 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.560501 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.567927 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.576798 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:48Z\\\",\\\"message\\\":\\\"2025-12-05T05:27:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736\\\\n2025-12-05T05:27:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736 to /host/opt/cni/bin/\\\\n2025-12-05T05:27:03Z [verbose] multus-daemon started\\\\n2025-12-05T05:27:03Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:27:48Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.586434 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.593245 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.594363 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.594393 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.594402 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.594416 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.594427 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.603722 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:50Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.696338 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.696360 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.696370 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.696379 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.696386 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.798220 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.798243 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.798252 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.798261 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.798270 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.900090 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.900193 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.900281 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.900372 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:50 crc kubenswrapper[4652]: I1205 05:27:50.900434 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:50Z","lastTransitionTime":"2025-12-05T05:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.001841 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.001871 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.001882 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.001895 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.001905 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.103428 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.103539 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.103622 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.103687 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.103746 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.125573 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:51 crc kubenswrapper[4652]: E1205 05:27:51.125954 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.126145 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:51 crc kubenswrapper[4652]: E1205 05:27:51.126254 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.126417 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:51 crc kubenswrapper[4652]: E1205 05:27:51.126532 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.205271 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.205364 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.205424 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.205476 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.205624 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.307480 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.307527 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.307539 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.307550 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.307578 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.409158 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.409304 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.409367 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.409424 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.409478 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.511701 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.511754 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.511767 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.511787 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.511801 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.613277 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.613317 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.613330 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.613346 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.613356 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.714797 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.714829 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.714838 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.714851 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.714861 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.816332 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.816378 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.816390 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.816400 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.816408 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.918266 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.918375 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.918441 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.918505 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:51 crc kubenswrapper[4652]: I1205 05:27:51.918592 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:51Z","lastTransitionTime":"2025-12-05T05:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.020095 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.020213 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.020269 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.020333 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.020395 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.122471 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.122497 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.122505 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.122515 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.122524 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.124958 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:52 crc kubenswrapper[4652]: E1205 05:27:52.125061 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.223682 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.223724 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.223732 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.223743 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.223752 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.325659 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.325684 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.325691 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.325701 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.325708 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.427685 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.427745 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.427754 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.427764 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.427771 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.529383 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.529485 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.529541 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.529616 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.529691 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.631175 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.631201 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.631229 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.631239 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.631247 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.732884 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.732922 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.732933 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.732953 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.732964 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.834593 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.834688 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.834750 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.834816 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.834877 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.940079 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.940113 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.940123 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.940529 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:52 crc kubenswrapper[4652]: I1205 05:27:52.940569 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:52Z","lastTransitionTime":"2025-12-05T05:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.042259 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.042290 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.042298 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.042310 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.042318 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.125596 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:53 crc kubenswrapper[4652]: E1205 05:27:53.125676 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.125701 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.125766 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:53 crc kubenswrapper[4652]: E1205 05:27:53.125871 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:53 crc kubenswrapper[4652]: E1205 05:27:53.125969 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.143994 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.144020 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.144030 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.144042 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.144049 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.246059 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.246088 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.246096 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.246113 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.246120 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.347756 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.347788 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.347795 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.347807 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.347823 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.449466 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.449518 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.449533 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.449550 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.449598 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.550853 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.550881 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.550889 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.550907 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.550915 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.652194 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.652244 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.652260 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.652276 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.652288 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.753625 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.753653 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.753662 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.753673 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.753681 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.856087 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.856162 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.856177 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.856191 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.856201 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.958291 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.958332 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.958343 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.958359 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:53 crc kubenswrapper[4652]: I1205 05:27:53.958367 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:53Z","lastTransitionTime":"2025-12-05T05:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.060474 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.060519 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.060528 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.060542 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.060550 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.125656 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:54 crc kubenswrapper[4652]: E1205 05:27:54.125784 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.125883 4652 scope.go:117] "RemoveContainer" containerID="62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.164049 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.164095 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.164133 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.164162 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.164178 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.266550 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.266589 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.266600 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.266614 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.266621 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.368776 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.368809 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.368817 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.368829 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.368839 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.459161 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/2.log" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.461038 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.461318 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.469324 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.469873 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.469920 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.469929 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.469943 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.469952 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.478160 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.486124 4652 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.493388 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.505793 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 
obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses
\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.514073 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.523041 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.530970 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.539475 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.546740 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.555517 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.564385 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.571519 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.571577 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.571588 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.571607 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.571616 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.573795 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.583099 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:48Z\\\",\\\"message\\\":\\\"2025-12-05T05:27:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736\\\\n2025-12-05T05:27:03+00:00 [cnibincopy] Successfully moved files in 
/host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736 to /host/opt/cni/bin/\\\\n2025-12-05T05:27:03Z [verbose] multus-daemon started\\\\n2025-12-05T05:27:03Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:27:48Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.594624 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.607148 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.618488 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.627051 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.673693 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.673735 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.673745 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.673760 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.673771 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.775589 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.775625 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.775635 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.775654 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.775667 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.877777 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.877821 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.877832 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.877847 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.877856 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.979365 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.979397 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.979407 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.979420 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:54 crc kubenswrapper[4652]: I1205 05:27:54.979442 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:54Z","lastTransitionTime":"2025-12-05T05:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.081624 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.081666 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.081676 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.081689 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.081698 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.125290 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.125357 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:55 crc kubenswrapper[4652]: E1205 05:27:55.125470 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.125663 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:55 crc kubenswrapper[4652]: E1205 05:27:55.125744 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:55 crc kubenswrapper[4652]: E1205 05:27:55.125925 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.183337 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.183379 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.183388 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.183400 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.183409 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.284725 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.284753 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.284760 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.284769 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.284778 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.386177 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.386214 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.386224 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.386237 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.386247 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.464608 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/3.log" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.465028 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/2.log" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.466863 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" exitCode=1 Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.466899 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.466958 4652 scope.go:117] "RemoveContainer" containerID="62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.467296 4652 scope.go:117] "RemoveContainer" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" Dec 05 05:27:55 crc kubenswrapper[4652]: E1205 05:27:55.467529 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.480477 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.487933 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.487964 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.487977 4652 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.487991 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.488000 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.491236 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.499098 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.507745 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.515939 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.528729 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62c2cb9a0f83f4d8325ee44df81dfec2b190a378a4d516261a31676ed23bc90d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:25Z\\\",\\\"message\\\":\\\":[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e4e4203e-87c7-4024-930a-5d6bdfe2bdde}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:27:25.416371 6363 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 05:27:25.416376 6363 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1205 05:27:25.416376 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:25Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:25.416383 6363 
obj_retry.go:303\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:54Z\\\",\\\"message\\\":\\\"ion-migrator-operator/metrics]} name:Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7f9b8f25-db1a-4d02-a423-9afc5c2fb83c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 05:27:54.750900 6754 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 
05:27:54.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.535466 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.543540 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.552238 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.560423 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.567700 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.574908 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.584342 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.589639 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.589670 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.589679 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.589692 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.589703 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.592619 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.600823 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.609900 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:48Z\\\",\\\"message\\\":\\\"2025-12-05T05:27:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736\\\\n2025-12-05T05:27:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736 to /host/opt/cni/bin/\\\\n2025-12-05T05:27:03Z [verbose] multus-daemon started\\\\n2025-12-05T05:27:03Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:27:48Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.619824 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.626490 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.691974 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.692023 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.692032 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.692048 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.692076 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.794688 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.794715 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.794724 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.794735 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.794746 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.896344 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.896379 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.896389 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.896405 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.896416 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.998085 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.998128 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.998138 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.998147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:55 crc kubenswrapper[4652]: I1205 05:27:55.998156 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:55Z","lastTransitionTime":"2025-12-05T05:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.099982 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.100018 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.100029 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.100045 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.100056 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.124906 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:56 crc kubenswrapper[4652]: E1205 05:27:56.125041 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.201708 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.201755 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.201772 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.201788 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.201879 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.307930 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.307978 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.307989 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.308001 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.308011 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.410422 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.410470 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.410483 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.410504 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.410520 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.471722 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/3.log" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.474970 4652 scope.go:117] "RemoveContainer" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" Dec 05 05:27:56 crc kubenswrapper[4652]: E1205 05:27:56.475159 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.485781 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.496527 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.505187 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.512344 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.512397 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.512410 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.512428 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.512441 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.512809 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.528836 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:54Z\\\",\\\"message\\\":\\\"ion-migrator-operator/metrics]} name:Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7f9b8f25-db1a-4d02-a423-9afc5c2fb83c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 05:27:54.750900 6754 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 
05:27:54.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.539037 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.548575 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.557405 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.565520 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.572671 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.580752 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.589745 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.599927 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.610417 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:48Z\\\",\\\"message\\\":\\\"2025-12-05T05:27:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736\\\\n2025-12-05T05:27:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736 to /host/opt/cni/bin/\\\\n2025-12-05T05:27:03Z [verbose] multus-daemon started\\\\n2025-12-05T05:27:03Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:27:48Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.614168 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.614200 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.614212 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.614232 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.614243 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.622464 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.630240 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.639918 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.648507 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.716176 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.716210 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.716220 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.716231 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.716238 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.818311 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.818349 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.818360 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.818380 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.818400 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.920173 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.920208 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.920217 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.920226 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:56 crc kubenswrapper[4652]: I1205 05:27:56.920236 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:56Z","lastTransitionTime":"2025-12-05T05:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.022262 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.022298 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.022306 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.022318 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.022328 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.124606 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.124643 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.124645 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:57 crc kubenswrapper[4652]: E1205 05:27:57.124735 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.124649 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.124650 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: E1205 05:27:57.124819 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.124837 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.124865 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: E1205 05:27:57.124880 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.124882 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.226401 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.226432 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.226440 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.226452 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.226463 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.327962 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.327992 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.328001 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.328012 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.328020 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.429368 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.429403 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.429411 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.429424 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.429434 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.530687 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.530721 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.530729 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.530739 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.530748 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.632617 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.632671 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.632683 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.632697 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.632708 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.734614 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.734687 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.734704 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.734725 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.734740 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.836437 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.836467 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.836482 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.836493 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.836501 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.938875 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.938908 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.938919 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.938929 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:57 crc kubenswrapper[4652]: I1205 05:27:57.938937 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:57Z","lastTransitionTime":"2025-12-05T05:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.040873 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.040905 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.040915 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.040925 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.040935 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.124997 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:27:58 crc kubenswrapper[4652]: E1205 05:27:58.125326 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.137219 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.142678 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.142710 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.142721 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 
crc kubenswrapper[4652]: I1205 05:27:58.142732 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.142742 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.149089 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.158263 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.165894 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 
05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.174070 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.181167 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.189853 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.201281 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.210019 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:48Z\\\",\\\"message\\\":\\\"2025-12-05T05:27:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736\\\\n2025-12-05T05:27:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736 to /host/opt/cni/bin/\\\\n2025-12-05T05:27:03Z [verbose] multus-daemon started\\\\n2025-12-05T05:27:03Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:27:48Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.220436 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.228051 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.237517 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.244496 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.244529 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.244543 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.244574 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.244586 4652 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.245856 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.252614 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.260804 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.268781 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.276363 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.288758 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:54Z\\\",\\\"message\\\":\\\"ion-migrator-operator/metrics]} name:Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7f9b8f25-db1a-4d02-a423-9afc5c2fb83c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 05:27:54.750900 6754 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:54.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:58Z is after 2025-08-24T17:21:41Z" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.346575 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.346612 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.346627 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.346641 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.346653 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.448255 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.448548 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.448576 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.448592 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.448603 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.550411 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.550443 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.550474 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.550489 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.550498 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.652233 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.652262 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.652272 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.652283 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.652291 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.754585 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.754651 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.754666 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.754690 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.754705 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.856617 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.856765 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.856829 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.856886 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.856957 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.924015 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.924181 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:58 crc kubenswrapper[4652]: E1205 05:27:58.924224 4652 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:27:58 crc kubenswrapper[4652]: E1205 05:27:58.924262 4652 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:27:58 crc kubenswrapper[4652]: E1205 05:27:58.924292 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:29:02.924273317 +0000 UTC m=+145.161003594 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.924233 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:58 crc kubenswrapper[4652]: E1205 05:27:58.924312 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:29:02.924301921 +0000 UTC m=+145.161032188 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:27:58 crc kubenswrapper[4652]: E1205 05:27:58.924490 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 05:29:02.92447669 +0000 UTC m=+145.161206956 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.958297 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.958317 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.958325 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.958337 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:58 crc kubenswrapper[4652]: I1205 05:27:58.958346 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:58Z","lastTransitionTime":"2025-12-05T05:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.025169 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.025217 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.025308 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.025327 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.025336 4652 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.025362 4652 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:29:03.02535522 +0000 UTC m=+145.262085487 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.025536 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.025651 4652 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.025718 4652 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.025823 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:29:03.025811779 +0000 UTC m=+145.262542056 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.065131 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.065176 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.065186 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.065201 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.065212 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.125659 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.125792 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.125823 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.125867 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.125995 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.126058 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.167286 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.167321 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.167331 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.167345 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.167355 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.269957 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.270003 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.270014 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.270038 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.270055 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.372233 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.372265 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.372278 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.372296 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.372307 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.474971 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.475028 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.475039 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.475058 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.475068 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.577356 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.577399 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.577410 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.577424 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.577436 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.679917 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.679947 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.679956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.679969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.679978 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.781595 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.781625 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.781635 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.781647 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.781656 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.829998 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.830121 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.830188 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.830268 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.830348 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.841647 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:59Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.845836 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.845861 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.845871 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.845882 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.845890 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.855924 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:59Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.859091 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.859136 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.859156 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.859167 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.859175 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.868934 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:59Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.872032 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.872211 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.872303 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.872372 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.872438 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.880492 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:59Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.883163 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.883187 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.883196 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.883205 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.883212 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.891249 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:59Z is after 
2025-08-24T17:21:41Z" Dec 05 05:27:59 crc kubenswrapper[4652]: E1205 05:27:59.891405 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.892492 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.892518 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.892527 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.892540 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.892550 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.994964 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.994995 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.995007 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.995020 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:27:59 crc kubenswrapper[4652]: I1205 05:27:59.995030 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:27:59Z","lastTransitionTime":"2025-12-05T05:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.098063 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.098224 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.098304 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.098369 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.098427 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.125419 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:00 crc kubenswrapper[4652]: E1205 05:28:00.125590 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.200872 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.200917 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.200930 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.200947 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.200962 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.303020 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.303066 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.303074 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.303089 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.303101 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.407550 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.407615 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.407624 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.407658 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.407671 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.509519 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.509595 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.509607 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.509629 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.509640 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.612325 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.612362 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.612372 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.612387 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.612410 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.715889 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.715932 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.715945 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.715960 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.715970 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.817809 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.817844 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.817853 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.817863 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.817873 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.920540 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.920623 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.920632 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.920643 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:00 crc kubenswrapper[4652]: I1205 05:28:00.920652 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:00Z","lastTransitionTime":"2025-12-05T05:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.022684 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.022785 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.022800 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.022811 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.022819 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.124669 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.124715 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.124721 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:01 crc kubenswrapper[4652]: E1205 05:28:01.124772 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.124671 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.124816 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.124825 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.124839 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.124848 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: E1205 05:28:01.124856 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:01 crc kubenswrapper[4652]: E1205 05:28:01.124914 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.226790 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.226825 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.226835 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.226847 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.226859 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.328809 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.328837 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.328845 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.328856 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.328865 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.430708 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.430739 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.430747 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.430758 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.430768 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.533474 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.533514 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.533525 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.533538 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.533547 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.635220 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.635245 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.635255 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.635265 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.635274 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.737296 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.737326 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.737338 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.737348 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.737356 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.839031 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.839065 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.839075 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.839087 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.839096 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.941253 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.941286 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.941294 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.941307 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:01 crc kubenswrapper[4652]: I1205 05:28:01.941316 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:01Z","lastTransitionTime":"2025-12-05T05:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.043744 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.044084 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.044222 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.044317 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.044387 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.125498 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:02 crc kubenswrapper[4652]: E1205 05:28:02.125763 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.146421 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.146451 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.146460 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.146470 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.146481 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.248468 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.248504 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.248514 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.248527 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.248538 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.350650 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.350682 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.350691 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.350708 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.350720 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.452694 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.452731 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.452741 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.452756 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.452768 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.554506 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.554589 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.554601 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.554625 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.554643 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.656904 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.656939 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.656947 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.656961 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.656969 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.758249 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.758297 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.758308 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.758319 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.758343 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.860710 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.860763 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.860772 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.860782 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.860790 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.962393 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.962425 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.962433 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.962446 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:02 crc kubenswrapper[4652]: I1205 05:28:02.962458 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:02Z","lastTransitionTime":"2025-12-05T05:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.064543 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.064594 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.064605 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.064617 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.064625 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.124609 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:03 crc kubenswrapper[4652]: E1205 05:28:03.124742 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.124817 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.124852 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:03 crc kubenswrapper[4652]: E1205 05:28:03.124909 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:03 crc kubenswrapper[4652]: E1205 05:28:03.125079 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.166229 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.166252 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.166260 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.166271 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.166279 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.268137 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.268166 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.268174 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.268185 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.268192 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.369879 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.369912 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.369923 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.369933 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.369942 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.472796 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.472853 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.472866 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.472888 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.472901 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.574610 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.574642 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.574653 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.574664 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.574672 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.676726 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.676754 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.676764 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.676775 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.676785 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.778509 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.778537 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.778546 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.778572 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.778580 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.880306 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.880351 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.880364 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.880375 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.880383 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.982538 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.982611 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.982621 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.982637 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:03 crc kubenswrapper[4652]: I1205 05:28:03.982648 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:03Z","lastTransitionTime":"2025-12-05T05:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.084206 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.084238 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.084247 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.084256 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.084263 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.124842 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:04 crc kubenswrapper[4652]: E1205 05:28:04.124996 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.186422 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.186487 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.186502 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.186520 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.186534 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.288585 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.288623 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.288648 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.288666 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.288678 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.390421 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.390448 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.390457 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.390466 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.390472 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.492598 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.492628 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.492638 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.492647 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.492655 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.594009 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.594041 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.594052 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.594069 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.594080 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.695865 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.695898 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.695910 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.695925 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.695936 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.797879 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.797950 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.797961 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.797986 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.798006 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.900785 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.900843 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.900856 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.900869 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:04 crc kubenswrapper[4652]: I1205 05:28:04.900878 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:04Z","lastTransitionTime":"2025-12-05T05:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.002612 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.002646 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.002656 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.002670 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.002682 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.104661 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.104704 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.104714 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.104728 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.104741 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.125077 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.125087 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.125087 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:05 crc kubenswrapper[4652]: E1205 05:28:05.125192 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:05 crc kubenswrapper[4652]: E1205 05:28:05.125288 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:05 crc kubenswrapper[4652]: E1205 05:28:05.125328 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.207075 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.207197 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.207267 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.207352 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.207427 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.309182 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.309227 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.309235 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.309246 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.309254 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.411207 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.411249 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.411263 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.411279 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.411295 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.512792 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.513420 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.513495 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.513591 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.513667 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.615436 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.615534 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.615642 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.615703 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.615758 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.717591 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.717617 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.717627 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.717639 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.717646 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.819154 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.819250 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.819319 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.819405 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.819461 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.921354 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.921385 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.921396 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.921407 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:05 crc kubenswrapper[4652]: I1205 05:28:05.921415 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:05Z","lastTransitionTime":"2025-12-05T05:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.023674 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.023723 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.023734 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.023752 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.023762 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.124735 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:06 crc kubenswrapper[4652]: E1205 05:28:06.124817 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.126568 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.126614 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.126634 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.129669 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.129713 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.231466 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.231510 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.231524 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.231542 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.231591 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.333130 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.333151 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.333159 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.333183 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.333192 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.435088 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.435146 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.435156 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.435174 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.435186 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.537356 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.537667 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.537676 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.537687 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.537694 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.639632 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.639670 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.639679 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.639694 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.639703 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.741487 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.741516 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.741524 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.741548 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.741571 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.843321 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.843353 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.843364 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.843374 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.843383 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.945392 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.945418 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.945428 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.945439 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:06 crc kubenswrapper[4652]: I1205 05:28:06.945446 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:06Z","lastTransitionTime":"2025-12-05T05:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.046800 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.046835 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.046844 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.046854 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.046863 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.125391 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.125422 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.125454 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:07 crc kubenswrapper[4652]: E1205 05:28:07.125508 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:07 crc kubenswrapper[4652]: E1205 05:28:07.125595 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:07 crc kubenswrapper[4652]: E1205 05:28:07.125687 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.148787 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.148817 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.148826 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.148840 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.148851 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.250732 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.250770 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.250783 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.250795 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.250805 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.352509 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.352539 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.352548 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.352579 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.352595 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.454345 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.454374 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.454382 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.454393 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.454400 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.555570 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.555608 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.555620 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.555633 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.555643 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.657290 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.657322 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.657333 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.657344 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.657353 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.759474 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.759506 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.759515 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.759529 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.759539 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.861142 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.861175 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.861189 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.861200 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.861212 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.962619 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.962653 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.962664 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.962676 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:07 crc kubenswrapper[4652]: I1205 05:28:07.962687 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:07Z","lastTransitionTime":"2025-12-05T05:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.064514 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.064570 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.064582 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.064592 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.064599 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.125147 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:08 crc kubenswrapper[4652]: E1205 05:28:08.125256 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.125829 4652 scope.go:117] "RemoveContainer" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" Dec 05 05:28:08 crc kubenswrapper[4652]: E1205 05:28:08.125980 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.137718 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.147673 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.156325 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 
05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.164363 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.166304 4652 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.166334 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.166343 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.166354 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.166362 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.172898 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.181644 4652 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.189957 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:48Z\\\",\\\"message\\\":\\\"2025-12-05T05:27:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736\\\\n2025-12-05T05:27:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736 to /host/opt/cni/bin/\\\\n2025-12-05T05:27:03Z [verbose] multus-daemon started\\\\n2025-12-05T05:27:03Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:27:48Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.200846 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.212858 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.222388 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.232252 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.241268 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.250701 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.259118 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.266636 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.267833 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.267869 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.267879 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.267891 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.267900 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.279070 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:54Z\\\",\\\"message\\\":\\\"ion-migrator-operator/metrics]} name:Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7f9b8f25-db1a-4d02-a423-9afc5c2fb83c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 05:27:54.750900 6754 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 
05:27:54.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.287405 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.295122 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:08Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.369440 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.369470 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.369482 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.369500 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.369513 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.471082 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.471118 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.471137 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.471149 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.471158 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.572403 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.572454 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.572465 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.572475 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.572484 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.673938 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.673981 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.673993 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.674014 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.674027 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.776188 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.776229 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.776239 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.776254 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.776264 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.878081 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.878115 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.878141 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.878154 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.878161 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.980393 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.980420 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.980428 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.980436 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:08 crc kubenswrapper[4652]: I1205 05:28:08.980443 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:08Z","lastTransitionTime":"2025-12-05T05:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.082167 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.082210 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.082222 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.082238 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.082250 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.125064 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.125101 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:09 crc kubenswrapper[4652]: E1205 05:28:09.125269 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.125353 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:09 crc kubenswrapper[4652]: E1205 05:28:09.125374 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:09 crc kubenswrapper[4652]: E1205 05:28:09.125648 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.184466 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.184498 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.184509 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.184520 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.184528 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.286481 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.286517 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.286527 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.286541 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.286550 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.388579 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.388607 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.388618 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.388629 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.388637 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.490756 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.490790 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.490801 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.490817 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.490830 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.593583 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.593609 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.593618 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.593628 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.593635 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.696588 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.696639 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.696649 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.696662 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.696672 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.798415 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.798452 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.798463 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.798476 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.798487 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.900586 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.900610 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.900618 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.900629 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:09 crc kubenswrapper[4652]: I1205 05:28:09.900637 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:09Z","lastTransitionTime":"2025-12-05T05:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.002593 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.002622 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.002632 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.002643 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.002651 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.095331 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.095361 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.095372 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.095383 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.095391 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: E1205 05:28:10.105835 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.108941 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.108969 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.108979 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.108992 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.109011 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: E1205 05:28:10.117835 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.120759 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.120803 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
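Every "Error updating node status" entry above and below fails for the single reason recorded in the error text itself: the serving certificate of the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05T05:28:10Z. The following minimal Go sketch (an illustration added for clarity, not part of the captured log; it assumes the webhook endpoint is reachable from wherever it runs) shows how one might confirm the validity window behind the x509 rejection:

// checkcert.go: print the validity window of the TLS certificate served at
// the webhook endpoint named in the log error. Editor's sketch, not from the
// log; the address 127.0.0.1:9743 is taken from the error text.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// InsecureSkipVerify lets us fetch the certificate even though normal
	// verification would fail with the same x509 error seen in the log.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial webhook endpoint: %v", err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	// This comparison is the essence of "certificate has expired or is not
	// yet valid": the current time must fall inside [NotBefore, NotAfter].
	if now := time.Now(); now.After(cert.NotAfter) || now.Before(cert.NotBefore) {
		fmt.Printf("certificate NOT valid at %s\n", now.Format(time.RFC3339))
	}
}

Run against the address in the log, this would be expected to print a notAfter of 2025-08-24T17:21:41Z, matching the webhook's rejection of every status patch.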
event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.120833 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.120845 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.120855 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.125242 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:10 crc kubenswrapper[4652]: E1205 05:28:10.125434 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:10 crc kubenswrapper[4652]: E1205 05:28:10.130117 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.133147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.133176 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
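The pod-sync failure for openshift-multus/network-metrics-daemon-vjg6c above is a symptom of the same NotReady state: kubelet reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. A self-contained Go sketch of that kind of directory check (an illustration, not part of the log; the accepted extensions follow the usual CNI loader convention and are an assumption here):

// cnicheck.go: report whether /etc/kubernetes/cni/net.d holds any CNI
// network configuration, mirroring the condition kubelet complains about.
// Editor's sketch; the directory path is taken from the log message.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const confDir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", confDir, err)
		return
	}
	var found []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// Extensions conventionally recognized by CNI config loaders
		// (assumption for this sketch).
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Printf("no CNI configuration file in %s (matches the kubelet error)\n", confDir)
		return
	}
	fmt.Printf("CNI configurations present: %v\n", found)
}

On the node captured here, such a check would presumably report an empty directory until the network provider writes its configuration, at which point the Ready condition could clear.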
event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.133186 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.133197 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.133205 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: E1205 05:28:10.141214 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.143416 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.143454 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.143466 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.143495 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.143505 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: E1205 05:28:10.151538 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:28:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b4a8999b-e59d-4947-b2ae-b94914acb85b\\\",\\\"systemUUID\\\":\\\"dd778907-0455-45bb-b295-f8f78dcf8791\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:10Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:10 crc kubenswrapper[4652]: E1205 05:28:10.151721 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.152731 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.152793 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.152808 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.152819 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.152829 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.254329 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.254363 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.254376 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.254391 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.254401 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.355807 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.355897 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.355968 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.356026 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.356091 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.457332 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.457492 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.457583 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.457669 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.457737 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.559471 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.559597 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.559682 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.559751 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.559810 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.661655 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.661685 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.661694 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.661707 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.661716 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.763227 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.763252 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.763260 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.763271 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.763278 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.865053 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.865073 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.865081 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.865092 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.865099 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.967147 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.967174 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.967183 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.967192 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:10 crc kubenswrapper[4652]: I1205 05:28:10.967199 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:10Z","lastTransitionTime":"2025-12-05T05:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.068919 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.068956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.068964 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.068978 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.068987 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.124623 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.124642 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.124648 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:11 crc kubenswrapper[4652]: E1205 05:28:11.124748 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:11 crc kubenswrapper[4652]: E1205 05:28:11.124817 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:11 crc kubenswrapper[4652]: E1205 05:28:11.124920 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.171700 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.171729 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.171737 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.171747 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.171755 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.273637 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.273667 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.273697 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.273707 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.273716 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.375266 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.375295 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.375303 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.375328 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.375336 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.476855 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.476883 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.476894 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.476905 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.476914 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.578276 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.578301 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.578309 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.578318 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.578325 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.680606 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.680635 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.680662 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.680672 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.680679 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.783079 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.783139 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.783152 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.783166 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.783173 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.884862 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.884902 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.884914 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.884929 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.884940 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.986705 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.986767 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.986776 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.986787 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:11 crc kubenswrapper[4652]: I1205 05:28:11.986794 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:11Z","lastTransitionTime":"2025-12-05T05:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.089222 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.089272 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.089283 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.089296 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.089305 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.125632 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:12 crc kubenswrapper[4652]: E1205 05:28:12.125753 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.190828 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.190859 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.190870 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.190884 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.190893 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.292674 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.292699 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.292724 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.292738 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.292747 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.394698 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.394742 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.394752 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.394765 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.394772 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.496183 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.496282 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.496340 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.496408 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.496463 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.598148 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.598168 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.598177 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.598187 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.598194 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.699765 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.699798 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.699808 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.699820 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.699829 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.801034 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.801070 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.801080 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.801092 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.801102 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.902998 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.903022 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.903032 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.903042 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:12 crc kubenswrapper[4652]: I1205 05:28:12.903049 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:12Z","lastTransitionTime":"2025-12-05T05:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.004821 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.004852 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.004863 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.004875 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.004882 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.107000 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.107032 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.107041 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.107051 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.107060 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.125245 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.125262 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.125250 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:13 crc kubenswrapper[4652]: E1205 05:28:13.125337 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:13 crc kubenswrapper[4652]: E1205 05:28:13.125409 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:13 crc kubenswrapper[4652]: E1205 05:28:13.125447 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.207988 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.208013 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.208020 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.208029 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.208037 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.309898 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.309931 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.309943 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.309955 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.309965 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.411588 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.411631 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.411641 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.411651 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.411658 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.513628 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.513653 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.513661 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.513688 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.513696 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.615201 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.615231 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.615257 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.615268 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.615275 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.716925 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.716956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.716965 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.716977 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.716986 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.818854 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.818889 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.818899 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.818911 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.818920 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.920680 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.920731 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.920741 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.920753 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:13 crc kubenswrapper[4652]: I1205 05:28:13.920762 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:13Z","lastTransitionTime":"2025-12-05T05:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.022798 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.022852 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.022862 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.022872 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.022878 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.124650 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:14 crc kubenswrapper[4652]: E1205 05:28:14.124733 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.124751 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.124767 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.124775 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.124784 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.124790 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.226704 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.226735 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.226745 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.226757 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.226766 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.329037 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.329082 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.329093 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.329104 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.329113 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.431311 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.431355 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.431365 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.431380 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.431390 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.533591 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.533657 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.533669 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.533690 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.533705 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.636142 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.636183 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.636192 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.636207 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.636217 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.738065 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.738092 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.738102 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.738113 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.738121 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.840387 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.840416 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.840424 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.840435 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.840443 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.941989 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.942034 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.942045 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.942060 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:14 crc kubenswrapper[4652]: I1205 05:28:14.942072 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:14Z","lastTransitionTime":"2025-12-05T05:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:15 crc kubenswrapper[4652]: I1205 05:28:15.043887 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:15 crc kubenswrapper[4652]: I1205 05:28:15.043929 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:15 crc kubenswrapper[4652]: I1205 05:28:15.043939 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:15 crc kubenswrapper[4652]: I1205 05:28:15.043956 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:15 crc kubenswrapper[4652]: I1205 05:28:15.043965 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:15Z","lastTransitionTime":"2025-12-05T05:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
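Every entry above reduces to one readiness check: the runtime reports NetworkReady=false until a CNI configuration file appears in /etc/kubernetes/cni/net.d/. Below is a minimal Go sketch of a check of that shape, assuming the conventional libcni extensions (.conf, .conflist, .json); it illustrates the condition being logged and is not the kubelet's actual implementation.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI
// configuration file, using the usual libcni extensions. This is a
// sketch of the readiness check described in the log entries above,
// not the kubelet's real code.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		// The condition the kubelet keeps logging above.
		fmt.Println("NetworkReady=false: no CNI configuration file")
		return
	}
	fmt.Println("NetworkReady=true")
}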
Dec 05 05:28:15 crc kubenswrapper[4652]: I1205 05:28:15.125130 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:28:15 crc kubenswrapper[4652]: I1205 05:28:15.125158 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:28:15 crc kubenswrapper[4652]: E1205 05:28:15.125325 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 05:28:15 crc kubenswrapper[4652]: E1205 05:28:15.125379 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 05:28:15 crc kubenswrapper[4652]: I1205 05:28:15.125192 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:28:15 crc kubenswrapper[4652]: E1205 05:28:15.125641 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[node-status cycle repeats as above from 05:28:15.145 through 05:28:16.063; repeats elided]
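The condition object embedded in each "Node became not ready" entry is plain JSON. The following self-contained Go sketch decodes one of the payloads above into a locally defined stand-in for the NodeCondition type; the struct fields simply mirror the keys visible in the log, and nothing is imported from k8s.io/api.

package main

import (
	"encoding/json"
	"fmt"
)

// NodeCondition is a trimmed-down, local stand-in for the condition
// object the kubelet serializes in the setters.go entries above.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Condition payload copied verbatim from one of the entries above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:15Z","lastTransitionTime":"2025-12-05T05:28:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	// The node stays NotReady while the "Ready" condition is "False".
	fmt.Printf("%s=%s (%s)\n", c.Type, c.Status, c.Reason)
}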
Dec 05 05:28:16 crc kubenswrapper[4652]: I1205 05:28:16.125156 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c"
Dec 05 05:28:16 crc kubenswrapper[4652]: E1205 05:28:16.125320 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b"
[node-status cycle repeats as above from 05:28:16.169 through 05:28:17.086; repeats elided]
Dec 05 05:28:17 crc kubenswrapper[4652]: I1205 05:28:17.125535 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:28:17 crc kubenswrapper[4652]: E1205 05:28:17.125683 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 05:28:17 crc kubenswrapper[4652]: I1205 05:28:17.125576 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:28:17 crc kubenswrapper[4652]: E1205 05:28:17.125768 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 05:28:17 crc kubenswrapper[4652]: I1205 05:28:17.125575 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:28:17 crc kubenswrapper[4652]: E1205 05:28:17.125848 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[node-status cycle repeats as above from 05:28:17.188 through 05:28:18.105; repeats elided]
Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.125369 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c"
Dec 05 05:28:18 crc kubenswrapper[4652]: E1205 05:28:18.125486 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b"
Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.136786 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd7547040946179762c8341ae3e657e35e47c38d402041e55265a94c8306d352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acf66f18157e23e2440cfb4399db582d4085a05650665130db3415198db99f5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.145248 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.152326 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nfbsv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c29b6caf-4921-4f3c-a3b7-31abdcba038a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7751ebab62b252f9ba81b16f7a316697482ab035440e28716d8bc91eed0df609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxjm9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nfbsv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.159789 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.168746 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0331197d-08f0-4dec-8d8a-72e6019bd2eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4453f16a8a20f592741cf067ed54d02f545426a316e7d94060aeb48b502041\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rrxgt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-s4t24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.181920 4652 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ab3e4ec7-1775-48b7-8848-a578578629df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:54Z\\\",\\\"message\\\":\\\"ion-migrator-operator/metrics]} name:Service_openshift-kube-storage-version-migrator-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.36:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7f9b8f25-db1a-4d02-a423-9afc5c2fb83c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 05:27:54.750900 6754 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:27:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 05:27:54.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvnhn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-94kb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.190343 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac4cb6a4-c409-4b78-ab83-432b48d9f713\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9271ee89be3e6d246948e5fce214276a8f827e3bdb30856e8477351448dc406\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416
081171ee895c5e76f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f6aae2cf9d2ab672899dc74fa98fce9fe808f012d0416081171ee895c5e76f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.199895 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aed172ab1d77cb7c8ab9c339206c7234e57f0b341ec36fe534393027926545ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z"
Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.207473 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.207511 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.207522 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.207539 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.207565 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:18Z","lastTransitionTime":"2025-12-05T05:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.214136 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.223392 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc2d5aa42907034297b560fa25bfd1dd5197b30cd2a2e931db79e216b0103ead\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.231350 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"043db566-ff59-4a73-845e-b36a6c80d5a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aab62ad2dc6237cbdc939dad868043be30ad46529108d3623de31a538afbcaa6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1626a2cff393e36a89bd90efe35d4aee9e10357a12f4276ddc2b29832617431d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-2wgph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 
05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.238853 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shztp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vjg6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.248135 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7692849e-585c-4b9e-8f28-c4b3f677f356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1205 05:26:54.963627 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 05:26:54.963897 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:26:54.965572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3717952744/tls.crt::/tmp/serving-cert-3717952744/tls.key\\\\\\\"\\\\nI1205 05:26:55.209346 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:26:55.211658 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:26:55.211685 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:26:55.211710 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:26:55.211715 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:26:55.216681 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 05:26:55.216707 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216712 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:26:55.216716 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:26:55.216719 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:26:55.216721 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:26:55.216724 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 05:26:55.216933 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 05:26:55.218857 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.256765 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e5938453-b078-44e7-bbc5-6db96692b239\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f6983ac98e5c0b882440f6345f336d78a69f501f3c2677cb957d0dbda159d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf61059e266a199782cbc3a1f7c47aeb8e90c1e9c3d4b6774524cc823c4acd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a46ab3c4feade86fb1621ff3f5d3f3088a34dcda89f4f298bfc5412a5b1ff5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.264533 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0fb349b1-9e59-42e0-b657-2320d9d0e22c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://620a237134007025fca7f7c86d07d353c5995be8247d49b7aba14b04365fe718\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3091b1dbba330c3204a2978e3bdd9fba4253af3512d24fd7044a124a09916d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88f9a1770f16749a478595c14c3c7de3016aa28f1bca0c35ed4f9fbb7647723f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e156c055b9f5a87cb69b395673ac6d436da92e4151863de6700f38b5953812a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:26:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:26:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:26:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.272580 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-569vn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57ea6288-d271-498d-ad7e-aa90f3d433e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:27:48Z\\\",\\\"message\\\":\\\"2025-12-05T05:27:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736\\\\n2025-12-05T05:27:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d4785dc8-35b8-4735-9d91-ff9597f1e736 to /host/opt/cni/bin/\\\\n2025-12-05T05:27:03Z [verbose] multus-daemon started\\\\n2025-12-05T05:27:03Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:27:48Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xg2ht\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-569vn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.282350 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d571ff-6054-4804-b819-bcee09f6ed35\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19e621914569a4dac2245ce786112a90f6519f9eb6057fdf72dcf39586c2e80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c809c6b806ea1c3ea3bb4220955769d2a864476879680ffd17c24e949bcd86c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3db494bad21877f5a4d24a8446d1183147f42bc154b9b383264bd04b96feeaea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a4c96e63d8e1a10ef67b19a796b2071dd9b8b85c8c76162fd3b4bc78d99dd3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://097ce826c9ce9d6463fda963916684ac5fa5de4f405ed30a7972f3bbdd49b76d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc5cb0e7b6ac6022f55db24bc8c0f5f1493fb2ecd848d98faf8965026fcfc94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dbd6894b3ddabf013099e37fd98206bc57e1e72899225f7e839aad6ba61eba5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:27:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hhbnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tfrqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.289240 4652 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5cpl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f611565f-a3f7-4dec-98c8-cc6c022ec406\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abcf74bd4a17125df3b2c8f8e7538697b91413e74c77acca90c3a89fe6bbabb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tmcmr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:27:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5cpl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:28:18Z is after 2025-08-24T17:21:41Z" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.309769 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.309804 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.309817 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.309840 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.309851 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:18Z","lastTransitionTime":"2025-12-05T05:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.412296 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.412459 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.412534 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.412648 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.412737 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:18Z","lastTransitionTime":"2025-12-05T05:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.514634 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.514659 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.514668 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.514681 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.514692 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:18Z","lastTransitionTime":"2025-12-05T05:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.616874 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.616903 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.616912 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.616925 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.616934 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:18Z","lastTransitionTime":"2025-12-05T05:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.718758 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.718868 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.718942 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.719003 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.719061 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:18Z","lastTransitionTime":"2025-12-05T05:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.820906 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.821261 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.821331 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.821393 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.821452 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:18Z","lastTransitionTime":"2025-12-05T05:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.922881 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.923004 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.923081 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.923159 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:18 crc kubenswrapper[4652]: I1205 05:28:18.923233 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:18Z","lastTransitionTime":"2025-12-05T05:28:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.024719 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.024746 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.024754 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.024764 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.024772 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.124903 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:19 crc kubenswrapper[4652]: E1205 05:28:19.124990 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.125004 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.125030 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:19 crc kubenswrapper[4652]: E1205 05:28:19.125108 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:19 crc kubenswrapper[4652]: E1205 05:28:19.125134 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.126103 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.126145 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.126155 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.126168 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.126180 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.228051 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.228078 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.228088 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.228100 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.228108 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.330156 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.330183 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.330192 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.330203 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.330211 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.431832 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.431860 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.431869 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.431881 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.431889 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.534175 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.534208 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.534218 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.534229 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.534237 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.635661 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.635690 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.635702 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.635727 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.635735 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.695362 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:19 crc kubenswrapper[4652]: E1205 05:28:19.695465 4652 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:28:19 crc kubenswrapper[4652]: E1205 05:28:19.695500 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs podName:72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:23.695488181 +0000 UTC m=+165.932218447 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs") pod "network-metrics-daemon-vjg6c" (UID: "72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.737976 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.738017 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.738030 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.738051 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.738064 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.839964 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.839992 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.840003 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.840015 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.840024 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.941706 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.941740 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.941753 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.941764 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:19 crc kubenswrapper[4652]: I1205 05:28:19.941771 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:19Z","lastTransitionTime":"2025-12-05T05:28:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.043657 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.043725 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.043737 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.043761 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.043776 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:20Z","lastTransitionTime":"2025-12-05T05:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.124902 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:20 crc kubenswrapper[4652]: E1205 05:28:20.125134 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.126275 4652 scope.go:117] "RemoveContainer" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" Dec 05 05:28:20 crc kubenswrapper[4652]: E1205 05:28:20.126470 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.145999 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.146036 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.146046 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.146059 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.146072 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:20Z","lastTransitionTime":"2025-12-05T05:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.248138 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.248201 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.248211 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.248231 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.248245 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:20Z","lastTransitionTime":"2025-12-05T05:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.349729 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.349762 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.349770 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.349781 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.349793 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:20Z","lastTransitionTime":"2025-12-05T05:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.388331 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.388422 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.388434 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.388495 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.388511 4652 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:28:20Z","lastTransitionTime":"2025-12-05T05:28:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.428398 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk"] Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.428755 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.430346 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.430549 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.430618 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.431053 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.467090 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podStartSLOduration=79.467063865 podStartE2EDuration="1m19.467063865s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.451274611 +0000 UTC m=+102.688004878" watchObservedRunningTime="2025-12-05 05:28:20.467063865 +0000 UTC m=+102.703794132" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.474102 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=35.474065461 podStartE2EDuration="35.474065461s" podCreationTimestamp="2025-12-05 05:27:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.474029053 +0000 UTC m=+102.710759330" watchObservedRunningTime="2025-12-05 05:28:20.474065461 +0000 UTC m=+102.710795729" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.503582 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a7d0ac97-347b-45a2-a089-ecf373a0262f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.503618 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a7d0ac97-347b-45a2-a089-ecf373a0262f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.503653 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a7d0ac97-347b-45a2-a089-ecf373a0262f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.503749 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7d0ac97-347b-45a2-a089-ecf373a0262f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.503795 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7d0ac97-347b-45a2-a089-ecf373a0262f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.515615 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2wgph" podStartSLOduration=78.515597188 podStartE2EDuration="1m18.515597188s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.506514255 +0000 UTC m=+102.743244522" watchObservedRunningTime="2025-12-05 05:28:20.515597188 +0000 UTC m=+102.752327455" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.561539 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=84.56152548 podStartE2EDuration="1m24.56152548s" podCreationTimestamp="2025-12-05 05:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.543078374 +0000 UTC m=+102.779808631" watchObservedRunningTime="2025-12-05 05:28:20.56152548 +0000 UTC m=+102.798255747" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.574972 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=47.574961324 podStartE2EDuration="47.574961324s" podCreationTimestamp="2025-12-05 05:27:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.571344718 +0000 UTC m=+102.808074985" watchObservedRunningTime="2025-12-05 05:28:20.574961324 +0000 UTC m=+102.811691591" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.575080 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=85.575075428 podStartE2EDuration="1m25.575075428s" podCreationTimestamp="2025-12-05 05:26:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.561694377 +0000 UTC m=+102.798424644" watchObservedRunningTime="2025-12-05 05:28:20.575075428 +0000 UTC m=+102.811805696" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.599521 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-569vn" podStartSLOduration=78.59950386 podStartE2EDuration="1m18.59950386s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.586879823 +0000 UTC 
m=+102.823610100" watchObservedRunningTime="2025-12-05 05:28:20.59950386 +0000 UTC m=+102.836234127" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.599957 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-tfrqf" podStartSLOduration=78.599951693 podStartE2EDuration="1m18.599951693s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.599669361 +0000 UTC m=+102.836399628" watchObservedRunningTime="2025-12-05 05:28:20.599951693 +0000 UTC m=+102.836681960" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.605002 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7d0ac97-347b-45a2-a089-ecf373a0262f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.605051 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7d0ac97-347b-45a2-a089-ecf373a0262f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.605079 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a7d0ac97-347b-45a2-a089-ecf373a0262f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.605098 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a7d0ac97-347b-45a2-a089-ecf373a0262f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.605133 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a7d0ac97-347b-45a2-a089-ecf373a0262f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.605178 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a7d0ac97-347b-45a2-a089-ecf373a0262f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.605200 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a7d0ac97-347b-45a2-a089-ecf373a0262f-etc-ssl-certs\") pod 
\"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.606004 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a7d0ac97-347b-45a2-a089-ecf373a0262f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.616805 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7d0ac97-347b-45a2-a089-ecf373a0262f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.626022 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7d0ac97-347b-45a2-a089-ecf373a0262f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kqffk\" (UID: \"a7d0ac97-347b-45a2-a089-ecf373a0262f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.633177 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-5cpl7" podStartSLOduration=79.63315145 podStartE2EDuration="1m19.63315145s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.61946296 +0000 UTC m=+102.856193218" watchObservedRunningTime="2025-12-05 05:28:20.63315145 +0000 UTC m=+102.869881717" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.655718 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-nfbsv" podStartSLOduration=79.655693622 podStartE2EDuration="1m19.655693622s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:20.654777668 +0000 UTC m=+102.891507935" watchObservedRunningTime="2025-12-05 05:28:20.655693622 +0000 UTC m=+102.892423889" Dec 05 05:28:20 crc kubenswrapper[4652]: I1205 05:28:20.740252 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" Dec 05 05:28:21 crc kubenswrapper[4652]: I1205 05:28:21.124988 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:21 crc kubenswrapper[4652]: I1205 05:28:21.124995 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:21 crc kubenswrapper[4652]: I1205 05:28:21.124995 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:21 crc kubenswrapper[4652]: E1205 05:28:21.125524 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:21 crc kubenswrapper[4652]: E1205 05:28:21.125414 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:21 crc kubenswrapper[4652]: E1205 05:28:21.125612 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:21 crc kubenswrapper[4652]: I1205 05:28:21.538202 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" event={"ID":"a7d0ac97-347b-45a2-a089-ecf373a0262f","Type":"ContainerStarted","Data":"8810ef7a521375f75f83eea9f3fe6569ef7c581e484f7ff52aa526f8a1b258d3"} Dec 05 05:28:21 crc kubenswrapper[4652]: I1205 05:28:21.538265 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" event={"ID":"a7d0ac97-347b-45a2-a089-ecf373a0262f","Type":"ContainerStarted","Data":"9c7c5b89d2e2fa9785c27cb713f5d5efd32c9a93e90fa539070266ee0ebe3f04"} Dec 05 05:28:21 crc kubenswrapper[4652]: I1205 05:28:21.547641 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kqffk" podStartSLOduration=80.547620631 podStartE2EDuration="1m20.547620631s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:21.547404875 +0000 UTC m=+103.784135143" watchObservedRunningTime="2025-12-05 05:28:21.547620631 +0000 UTC m=+103.784350898" Dec 05 05:28:22 crc kubenswrapper[4652]: I1205 05:28:22.124948 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:22 crc kubenswrapper[4652]: E1205 05:28:22.125281 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:22 crc kubenswrapper[4652]: I1205 05:28:22.140517 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 05:28:23 crc kubenswrapper[4652]: I1205 05:28:23.125641 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:23 crc kubenswrapper[4652]: I1205 05:28:23.125673 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:23 crc kubenswrapper[4652]: E1205 05:28:23.125745 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:23 crc kubenswrapper[4652]: E1205 05:28:23.125846 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:23 crc kubenswrapper[4652]: I1205 05:28:23.125954 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:23 crc kubenswrapper[4652]: E1205 05:28:23.126017 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:24 crc kubenswrapper[4652]: I1205 05:28:24.125194 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:24 crc kubenswrapper[4652]: E1205 05:28:24.125350 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:25 crc kubenswrapper[4652]: I1205 05:28:25.125254 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:25 crc kubenswrapper[4652]: I1205 05:28:25.125293 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:25 crc kubenswrapper[4652]: I1205 05:28:25.125356 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:25 crc kubenswrapper[4652]: E1205 05:28:25.125425 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:25 crc kubenswrapper[4652]: E1205 05:28:25.125367 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:25 crc kubenswrapper[4652]: E1205 05:28:25.125526 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:26 crc kubenswrapper[4652]: I1205 05:28:26.125619 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:26 crc kubenswrapper[4652]: E1205 05:28:26.125772 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:27 crc kubenswrapper[4652]: I1205 05:28:27.125290 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:27 crc kubenswrapper[4652]: I1205 05:28:27.125362 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:27 crc kubenswrapper[4652]: I1205 05:28:27.125417 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:27 crc kubenswrapper[4652]: E1205 05:28:27.125452 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:27 crc kubenswrapper[4652]: E1205 05:28:27.125586 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:27 crc kubenswrapper[4652]: E1205 05:28:27.125701 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:28 crc kubenswrapper[4652]: I1205 05:28:28.124820 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:28 crc kubenswrapper[4652]: E1205 05:28:28.125853 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:28 crc kubenswrapper[4652]: I1205 05:28:28.146473 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=6.146459834 podStartE2EDuration="6.146459834s" podCreationTimestamp="2025-12-05 05:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:28.145877378 +0000 UTC m=+110.382607655" watchObservedRunningTime="2025-12-05 05:28:28.146459834 +0000 UTC m=+110.383190101" Dec 05 05:28:29 crc kubenswrapper[4652]: I1205 05:28:29.125745 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:29 crc kubenswrapper[4652]: E1205 05:28:29.125981 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:29 crc kubenswrapper[4652]: I1205 05:28:29.125805 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:29 crc kubenswrapper[4652]: E1205 05:28:29.126412 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:29 crc kubenswrapper[4652]: I1205 05:28:29.125788 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:29 crc kubenswrapper[4652]: E1205 05:28:29.126612 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:30 crc kubenswrapper[4652]: I1205 05:28:30.124891 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:30 crc kubenswrapper[4652]: E1205 05:28:30.125041 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:31 crc kubenswrapper[4652]: I1205 05:28:31.125302 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:31 crc kubenswrapper[4652]: I1205 05:28:31.125384 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:31 crc kubenswrapper[4652]: E1205 05:28:31.125436 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:31 crc kubenswrapper[4652]: I1205 05:28:31.125322 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:31 crc kubenswrapper[4652]: E1205 05:28:31.125709 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:31 crc kubenswrapper[4652]: E1205 05:28:31.125694 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:32 crc kubenswrapper[4652]: I1205 05:28:32.125155 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:32 crc kubenswrapper[4652]: E1205 05:28:32.125256 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:33 crc kubenswrapper[4652]: I1205 05:28:33.125545 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:33 crc kubenswrapper[4652]: E1205 05:28:33.125849 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:33 crc kubenswrapper[4652]: I1205 05:28:33.125593 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:33 crc kubenswrapper[4652]: E1205 05:28:33.126290 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:33 crc kubenswrapper[4652]: I1205 05:28:33.125587 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:33 crc kubenswrapper[4652]: E1205 05:28:33.126464 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:34 crc kubenswrapper[4652]: I1205 05:28:34.125475 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:34 crc kubenswrapper[4652]: E1205 05:28:34.125894 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:34 crc kubenswrapper[4652]: I1205 05:28:34.126121 4652 scope.go:117] "RemoveContainer" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" Dec 05 05:28:34 crc kubenswrapper[4652]: E1205 05:28:34.126282 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-94kb9_openshift-ovn-kubernetes(ab3e4ec7-1775-48b7-8848-a578578629df)\"" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.124936 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.125038 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.125067 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:35 crc kubenswrapper[4652]: E1205 05:28:35.125277 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:35 crc kubenswrapper[4652]: E1205 05:28:35.125552 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:35 crc kubenswrapper[4652]: E1205 05:28:35.125615 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.582076 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/1.log" Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.582678 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/0.log" Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.582782 4652 generic.go:334] "Generic (PLEG): container finished" podID="57ea6288-d271-498d-ad7e-aa90f3d433e4" containerID="282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2" exitCode=1 Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.582863 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-569vn" event={"ID":"57ea6288-d271-498d-ad7e-aa90f3d433e4","Type":"ContainerDied","Data":"282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2"} Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.582918 4652 scope.go:117] "RemoveContainer" containerID="33aec9471365d4e605d83cf387df82dbedbc1e80e16e311e99d64642e91107e3" Dec 05 05:28:35 crc kubenswrapper[4652]: I1205 05:28:35.583266 4652 scope.go:117] "RemoveContainer" containerID="282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2" Dec 05 05:28:35 crc kubenswrapper[4652]: E1205 05:28:35.583424 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-569vn_openshift-multus(57ea6288-d271-498d-ad7e-aa90f3d433e4)\"" pod="openshift-multus/multus-569vn" podUID="57ea6288-d271-498d-ad7e-aa90f3d433e4" Dec 05 05:28:36 crc kubenswrapper[4652]: I1205 05:28:36.125541 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:36 crc kubenswrapper[4652]: E1205 05:28:36.125715 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:36 crc kubenswrapper[4652]: I1205 05:28:36.585687 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/1.log" Dec 05 05:28:37 crc kubenswrapper[4652]: I1205 05:28:37.125004 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:37 crc kubenswrapper[4652]: I1205 05:28:37.125039 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:37 crc kubenswrapper[4652]: E1205 05:28:37.125114 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:37 crc kubenswrapper[4652]: I1205 05:28:37.125152 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:37 crc kubenswrapper[4652]: E1205 05:28:37.125214 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:37 crc kubenswrapper[4652]: E1205 05:28:37.125412 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:38 crc kubenswrapper[4652]: E1205 05:28:38.108630 4652 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 05:28:38 crc kubenswrapper[4652]: I1205 05:28:38.125305 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:38 crc kubenswrapper[4652]: E1205 05:28:38.126524 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:38 crc kubenswrapper[4652]: E1205 05:28:38.198403 4652 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 05:28:39 crc kubenswrapper[4652]: I1205 05:28:39.124849 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:39 crc kubenswrapper[4652]: E1205 05:28:39.125257 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:39 crc kubenswrapper[4652]: I1205 05:28:39.125290 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:39 crc kubenswrapper[4652]: I1205 05:28:39.125296 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:39 crc kubenswrapper[4652]: E1205 05:28:39.125396 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:39 crc kubenswrapper[4652]: E1205 05:28:39.125478 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:40 crc kubenswrapper[4652]: I1205 05:28:40.125643 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:40 crc kubenswrapper[4652]: E1205 05:28:40.125830 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:41 crc kubenswrapper[4652]: I1205 05:28:41.125069 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:41 crc kubenswrapper[4652]: I1205 05:28:41.125099 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:41 crc kubenswrapper[4652]: I1205 05:28:41.125070 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:41 crc kubenswrapper[4652]: E1205 05:28:41.125252 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:41 crc kubenswrapper[4652]: E1205 05:28:41.125406 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:41 crc kubenswrapper[4652]: E1205 05:28:41.125487 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:42 crc kubenswrapper[4652]: I1205 05:28:42.125096 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:42 crc kubenswrapper[4652]: E1205 05:28:42.125283 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:43 crc kubenswrapper[4652]: I1205 05:28:43.125059 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:43 crc kubenswrapper[4652]: I1205 05:28:43.125094 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:43 crc kubenswrapper[4652]: I1205 05:28:43.125114 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:43 crc kubenswrapper[4652]: E1205 05:28:43.125179 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:43 crc kubenswrapper[4652]: E1205 05:28:43.125262 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:43 crc kubenswrapper[4652]: E1205 05:28:43.125337 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:43 crc kubenswrapper[4652]: E1205 05:28:43.199705 4652 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 05:28:44 crc kubenswrapper[4652]: I1205 05:28:44.125598 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:44 crc kubenswrapper[4652]: E1205 05:28:44.125753 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:45 crc kubenswrapper[4652]: I1205 05:28:45.125248 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:45 crc kubenswrapper[4652]: I1205 05:28:45.125251 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:45 crc kubenswrapper[4652]: E1205 05:28:45.125388 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:45 crc kubenswrapper[4652]: I1205 05:28:45.125274 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:45 crc kubenswrapper[4652]: E1205 05:28:45.125499 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:45 crc kubenswrapper[4652]: E1205 05:28:45.125766 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:46 crc kubenswrapper[4652]: I1205 05:28:46.125666 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:46 crc kubenswrapper[4652]: E1205 05:28:46.125797 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:47 crc kubenswrapper[4652]: I1205 05:28:47.125248 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:47 crc kubenswrapper[4652]: I1205 05:28:47.125275 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:47 crc kubenswrapper[4652]: I1205 05:28:47.125347 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:47 crc kubenswrapper[4652]: E1205 05:28:47.125433 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:47 crc kubenswrapper[4652]: E1205 05:28:47.125598 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:47 crc kubenswrapper[4652]: E1205 05:28:47.125996 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:47 crc kubenswrapper[4652]: I1205 05:28:47.126025 4652 scope.go:117] "RemoveContainer" containerID="282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2" Dec 05 05:28:47 crc kubenswrapper[4652]: I1205 05:28:47.616710 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/1.log" Dec 05 05:28:47 crc kubenswrapper[4652]: I1205 05:28:47.617086 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-569vn" event={"ID":"57ea6288-d271-498d-ad7e-aa90f3d433e4","Type":"ContainerStarted","Data":"75fb6a2ee0d7dd0a6cf678e06118f8ae72f0a5ec41174418948a48ba3fbb15c1"} Dec 05 05:28:48 crc kubenswrapper[4652]: I1205 05:28:48.125968 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:48 crc kubenswrapper[4652]: E1205 05:28:48.126245 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:48 crc kubenswrapper[4652]: I1205 05:28:48.126390 4652 scope.go:117] "RemoveContainer" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" Dec 05 05:28:48 crc kubenswrapper[4652]: E1205 05:28:48.200044 4652 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 05:28:48 crc kubenswrapper[4652]: I1205 05:28:48.622864 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/3.log" Dec 05 05:28:48 crc kubenswrapper[4652]: I1205 05:28:48.626627 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerStarted","Data":"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d"} Dec 05 05:28:48 crc kubenswrapper[4652]: I1205 05:28:48.649704 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podStartSLOduration=106.649682712 podStartE2EDuration="1m46.649682712s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:28:48.649579789 +0000 UTC m=+130.886310056" watchObservedRunningTime="2025-12-05 05:28:48.649682712 +0000 UTC m=+130.886412979" Dec 05 05:28:48 crc kubenswrapper[4652]: I1205 05:28:48.824062 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-vjg6c"] Dec 05 05:28:48 crc kubenswrapper[4652]: I1205 05:28:48.824171 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:48 crc kubenswrapper[4652]: E1205 05:28:48.824276 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:49 crc kubenswrapper[4652]: I1205 05:28:49.124887 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:49 crc kubenswrapper[4652]: I1205 05:28:49.124941 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:49 crc kubenswrapper[4652]: I1205 05:28:49.124902 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:49 crc kubenswrapper[4652]: E1205 05:28:49.125021 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:49 crc kubenswrapper[4652]: E1205 05:28:49.125102 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:49 crc kubenswrapper[4652]: E1205 05:28:49.125167 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:50 crc kubenswrapper[4652]: I1205 05:28:50.124985 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:50 crc kubenswrapper[4652]: E1205 05:28:50.125216 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:51 crc kubenswrapper[4652]: I1205 05:28:51.125593 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:51 crc kubenswrapper[4652]: I1205 05:28:51.125593 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:51 crc kubenswrapper[4652]: I1205 05:28:51.125611 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:51 crc kubenswrapper[4652]: E1205 05:28:51.125718 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:51 crc kubenswrapper[4652]: E1205 05:28:51.125936 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:51 crc kubenswrapper[4652]: E1205 05:28:51.126020 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:52 crc kubenswrapper[4652]: I1205 05:28:52.124902 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:52 crc kubenswrapper[4652]: E1205 05:28:52.125014 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vjg6c" podUID="72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b" Dec 05 05:28:53 crc kubenswrapper[4652]: I1205 05:28:53.125256 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:53 crc kubenswrapper[4652]: E1205 05:28:53.125624 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:28:53 crc kubenswrapper[4652]: I1205 05:28:53.125306 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:53 crc kubenswrapper[4652]: I1205 05:28:53.125276 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:53 crc kubenswrapper[4652]: E1205 05:28:53.125691 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:28:53 crc kubenswrapper[4652]: E1205 05:28:53.125785 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:28:54 crc kubenswrapper[4652]: I1205 05:28:54.125584 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:28:54 crc kubenswrapper[4652]: I1205 05:28:54.126968 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 05:28:54 crc kubenswrapper[4652]: I1205 05:28:54.128131 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 05:28:54 crc kubenswrapper[4652]: I1205 05:28:54.804321 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:28:54 crc kubenswrapper[4652]: I1205 05:28:54.816217 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:28:55 crc kubenswrapper[4652]: I1205 05:28:55.125382 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:28:55 crc kubenswrapper[4652]: I1205 05:28:55.125380 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:28:55 crc kubenswrapper[4652]: I1205 05:28:55.125400 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:28:55 crc kubenswrapper[4652]: I1205 05:28:55.127280 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 05:28:55 crc kubenswrapper[4652]: I1205 05:28:55.127571 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 05:28:55 crc kubenswrapper[4652]: I1205 05:28:55.127696 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 05:28:55 crc kubenswrapper[4652]: I1205 05:28:55.128111 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.535945 4652 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.569135 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6cfcs"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.570161 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.570168 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gdjlr"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.570649 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.570852 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5mktp"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.571361 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.571455 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hb9pn"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.571875 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.572065 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-pjnkl"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.572382 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.573675 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.575627 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.578026 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.580888 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.581295 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.581864 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.581892 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-9nh89"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.582753 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.583039 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4kx7t"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.590484 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.590867 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.590934 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.590975 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.591009 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.591029 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.591004 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.591113 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pslqt"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.591477 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.592513 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7lwtl"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.592824 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mvtf6"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.593156 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.593405 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-7lwtl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.594124 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.594149 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.594534 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.595230 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.595462 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.595614 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.595960 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.596640 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.596749 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.596879 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.596893 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5d42v"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.596885 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597074 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597098 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597181 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597208 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597473 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597824 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597850 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597890 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597853 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.597960 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.598595 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.598815 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.598947 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n2n75"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.599017 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.599420 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.599903 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.600287 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.600643 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.601000 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fjbx8"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.601333 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.601476 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.601618 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.602100 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.603993 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.604167 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.604266 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.604431 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.604486 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.604550 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.604719 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.604809 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.604921 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.605033 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.605139 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.605230 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.605366 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.605511 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.605960 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.606098 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.606260 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.606364 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 05:29:01 crc 
kubenswrapper[4652]: I1205 05:29:01.606486 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.606605 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.606727 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.606843 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.607482 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.607708 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.607940 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6hfpm"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.608125 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.608217 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.608267 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.608387 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.608393 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.610342 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.610618 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.610746 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.610775 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611106 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611132 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611259 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611322 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611417 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611511 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611633 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611760 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611808 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611853 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611946 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-cgsmk"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612178 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612429 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.619773 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611961 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.622515 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.611989 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612001 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612048 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612052 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612232 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612239 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612286 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612293 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612318 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612345 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612354 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612415 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612444 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.612501 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.624842 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.619718 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.626426 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.620245 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.627085 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.628200 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.628288 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.629863 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.630497 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.631002 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.631280 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.647274 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.648705 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.649120 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.649708 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.649926 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-client-ca\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650019 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/41f2faf0-6ed1-4043-8687-13dd1ef0788b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4z8lw\" (UID: \"41f2faf0-6ed1-4043-8687-13dd1ef0788b\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650092 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxtm8\" (UniqueName: \"kubernetes.io/projected/60243fa4-404c-47cc-b17e-0a338bb89b54-kube-api-access-wxtm8\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650150 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650154 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zk2r\" (UniqueName: \"kubernetes.io/projected/a2802700-9f5c-4987-905b-625784a96a37-kube-api-access-8zk2r\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650297 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-dir\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650369 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7220a624-ba75-4a01-9bcc-71450af1e114-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650410 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650465 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650437 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cc547d04-f064-455f-b8d3-1d5627a82ca4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fjbx8\" (UID: \"cc547d04-f064-455f-b8d3-1d5627a82ca4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650609 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650697 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650771 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtp4q\" (UniqueName: \"kubernetes.io/projected/e26e158c-7bd6-45bd-be06-8cbe6c1d4912-kube-api-access-mtp4q\") pod \"downloads-7954f5f757-7lwtl\" (UID: \"e26e158c-7bd6-45bd-be06-8cbe6c1d4912\") " pod="openshift-console/downloads-7954f5f757-7lwtl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650845 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-etcd-service-ca\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650916 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/a2802700-9f5c-4987-905b-625784a96a37-machine-approver-tls\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650984 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv552\" (UniqueName: \"kubernetes.io/projected/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-kube-api-access-vv552\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.651058 4652 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-policies\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.651123 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/28425929-c82a-4d16-a278-1cf2786276e5-serving-cert\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.651206 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d37a7d0d-0a12-40d2-b970-90394f2c11a8-trusted-ca\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650867 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.651105 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650648 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650672 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.651165 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.650631 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.651283 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-service-ca\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.651944 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.651944 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-oauth-serving-cert\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652031 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-config\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652051 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jmsk\" (UniqueName: \"kubernetes.io/projected/28425929-c82a-4d16-a278-1cf2786276e5-kube-api-access-9jmsk\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652066 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-encryption-config\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652080 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7dac1b70-3636-47c4-be0b-a798cb11a6e7-audit-dir\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652096 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-oauth-config\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652117 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-config\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652152 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-service-ca-bundle\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc 
kubenswrapper[4652]: I1205 05:29:01.652188 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652216 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-encryption-config\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652235 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-config\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652250 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2802700-9f5c-4987-905b-625784a96a37-config\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652268 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652285 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-etcd-serving-ca\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652309 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-config\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652336 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652338 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gfb8\" (UniqueName: \"kubernetes.io/projected/6b251475-c037-4453-8f24-406781fabc44-kube-api-access-8gfb8\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: 
\"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652409 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-serving-cert\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652429 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-serving-cert\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652443 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3541b892-0b8c-4319-b4fa-b4d34cca9e18-config\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652457 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-config\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652469 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-audit\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652486 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-267rl\" (UniqueName: \"kubernetes.io/projected/d37a7d0d-0a12-40d2-b970-90394f2c11a8-kube-api-access-267rl\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652500 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4jfj\" (UniqueName: \"kubernetes.io/projected/7dac1b70-3636-47c4-be0b-a798cb11a6e7-kube-api-access-s4jfj\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652516 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 
crc kubenswrapper[4652]: I1205 05:29:01.652530 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652544 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn7r6\" (UniqueName: \"kubernetes.io/projected/3440a79c-9036-4595-9967-ee9e5ae118c9-kube-api-access-kn7r6\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652578 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-image-import-ca\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652596 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7frp\" (UniqueName: \"kubernetes.io/projected/41f2faf0-6ed1-4043-8687-13dd1ef0788b-kube-api-access-t7frp\") pod \"cluster-samples-operator-665b6dd947-4z8lw\" (UID: \"41f2faf0-6ed1-4043-8687-13dd1ef0788b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652613 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3440a79c-9036-4595-9967-ee9e5ae118c9-serving-cert\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652641 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmwqd\" (UniqueName: \"kubernetes.io/projected/3541b892-0b8c-4319-b4fa-b4d34cca9e18-kube-api-access-mmwqd\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652700 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-serving-cert\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652764 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk2tq\" (UniqueName: \"kubernetes.io/projected/f4bd4318-0406-40e2-8b50-b79c312bb10a-kube-api-access-kk2tq\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652812 4652 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-config\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652834 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652853 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652872 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8l5f\" (UniqueName: \"kubernetes.io/projected/8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60-kube-api-access-v8l5f\") pod \"dns-operator-744455d44c-4kx7t\" (UID: \"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60\") " pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652890 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-audit-policies\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652944 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652962 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d37a7d0d-0a12-40d2-b970-90394f2c11a8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652977 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7dac1b70-3636-47c4-be0b-a798cb11a6e7-node-pullsecrets\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.652994 4652 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653018 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw8ld\" (UniqueName: \"kubernetes.io/projected/c435a822-7efb-4d40-b374-9523f145f48a-kube-api-access-tw8ld\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653035 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-client-ca\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653052 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d37a7d0d-0a12-40d2-b970-90394f2c11a8-metrics-tls\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653074 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60-metrics-tls\") pod \"dns-operator-744455d44c-4kx7t\" (UID: \"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60\") " pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653094 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-etcd-client\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653110 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653124 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-trusted-ca-bundle\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653140 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/a2802700-9f5c-4987-905b-625784a96a37-auth-proxy-config\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653179 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7220a624-ba75-4a01-9bcc-71450af1e114-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653225 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cdvx\" (UniqueName: \"kubernetes.io/projected/31d139be-8c3b-4ff9-9e9d-872906e7a547-kube-api-access-6cdvx\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653240 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/28425929-c82a-4d16-a278-1cf2786276e5-etcd-client\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653257 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3440a79c-9036-4595-9967-ee9e5ae118c9-trusted-ca\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653278 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653299 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653313 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653350 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/60243fa4-404c-47cc-b17e-0a338bb89b54-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653380 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7220a624-ba75-4a01-9bcc-71450af1e114-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653400 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-serving-cert\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653418 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60243fa4-404c-47cc-b17e-0a338bb89b54-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653447 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47njp\" (UniqueName: \"kubernetes.io/projected/d2679bc9-2b4f-4c24-9625-447a63d8ac59-kube-api-access-47njp\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653467 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b251475-c037-4453-8f24-406781fabc44-serving-cert\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653482 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3541b892-0b8c-4319-b4fa-b4d34cca9e18-images\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653502 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d2679bc9-2b4f-4c24-9625-447a63d8ac59-audit-dir\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653567 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-available-featuregates\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653587 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3440a79c-9036-4595-9967-ee9e5ae118c9-config\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653615 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-etcd-client\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653646 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c435a822-7efb-4d40-b374-9523f145f48a-serving-cert\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653664 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653678 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31d139be-8c3b-4ff9-9e9d-872906e7a547-serving-cert\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653698 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3541b892-0b8c-4319-b4fa-b4d34cca9e18-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653730 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf45j\" (UniqueName: \"kubernetes.io/projected/15cb8768-99e0-4907-af4d-0167fff40d3f-kube-api-access-vf45j\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653749 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-etcd-ca\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653769 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653787 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvn9p\" (UniqueName: \"kubernetes.io/projected/7220a624-ba75-4a01-9bcc-71450af1e114-kube-api-access-nvn9p\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.653805 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfdzz\" (UniqueName: \"kubernetes.io/projected/cc547d04-f064-455f-b8d3-1d5627a82ca4-kube-api-access-qfdzz\") pod \"multus-admission-controller-857f4d67dd-fjbx8\" (UID: \"cc547d04-f064-455f-b8d3-1d5627a82ca4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.656318 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.656546 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.656692 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.657100 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.657492 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.657946 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.657965 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.658145 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.658609 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.659172 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.660352 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.661912 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.662081 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5ksc5"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.662781 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.664155 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.664251 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.664581 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.664807 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.664939 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6cfcs"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.664992 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.675834 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.678063 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.678440 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.679463 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.681753 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-7qc7b"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.683604 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.684864 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.685494 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gdjlr"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.687056 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hb9pn"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.689918 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.690622 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.693033 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fjbx8"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.693060 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n2n75"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.693073 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.694888 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-9nh89"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.694940 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7lwtl"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.696020 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.696094 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-pjnkl"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.699459 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pslqt"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.699522 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.702528 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mvtf6"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.703350 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.704464 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5mktp"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.705129 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd"] Dec 05 05:29:01 crc kubenswrapper[4652]: 
I1205 05:29:01.705735 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.706551 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.707462 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.708088 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.709032 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.721852 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.721880 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.721892 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.724471 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5d42v"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.724494 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.724504 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.727986 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.731355 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5ksc5"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.731776 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.732295 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6hfpm"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.733433 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4kx7t"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.734832 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.734861 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.735094 4652 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.735165 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7qc7b"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.737595 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.737622 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-jwzzn"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.738158 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-jwzzn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.745050 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-4fnfv"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.745515 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-jwzzn"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.745601 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.751472 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754391 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-serving-cert\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754477 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3541b892-0b8c-4319-b4fa-b4d34cca9e18-config\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754500 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-config\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754585 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-audit\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754604 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-267rl\" (UniqueName: \"kubernetes.io/projected/d37a7d0d-0a12-40d2-b970-90394f2c11a8-kube-api-access-267rl\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc 
kubenswrapper[4652]: I1205 05:29:01.754625 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4jfj\" (UniqueName: \"kubernetes.io/projected/7dac1b70-3636-47c4-be0b-a798cb11a6e7-kube-api-access-s4jfj\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754647 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754665 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754681 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn7r6\" (UniqueName: \"kubernetes.io/projected/3440a79c-9036-4595-9967-ee9e5ae118c9-kube-api-access-kn7r6\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754703 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-image-import-ca\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754722 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7frp\" (UniqueName: \"kubernetes.io/projected/41f2faf0-6ed1-4043-8687-13dd1ef0788b-kube-api-access-t7frp\") pod \"cluster-samples-operator-665b6dd947-4z8lw\" (UID: \"41f2faf0-6ed1-4043-8687-13dd1ef0788b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754739 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3440a79c-9036-4595-9967-ee9e5ae118c9-serving-cert\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754768 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmwqd\" (UniqueName: \"kubernetes.io/projected/3541b892-0b8c-4319-b4fa-b4d34cca9e18-kube-api-access-mmwqd\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754788 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-serving-cert\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754804 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk2tq\" (UniqueName: \"kubernetes.io/projected/f4bd4318-0406-40e2-8b50-b79c312bb10a-kube-api-access-kk2tq\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754835 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-config\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754853 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754875 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754893 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8l5f\" (UniqueName: \"kubernetes.io/projected/8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60-kube-api-access-v8l5f\") pod \"dns-operator-744455d44c-4kx7t\" (UID: \"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60\") " pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754913 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-audit-policies\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754941 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754962 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d37a7d0d-0a12-40d2-b970-90394f2c11a8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: 
\"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754978 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7dac1b70-3636-47c4-be0b-a798cb11a6e7-node-pullsecrets\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.754995 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755015 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw8ld\" (UniqueName: \"kubernetes.io/projected/c435a822-7efb-4d40-b374-9523f145f48a-kube-api-access-tw8ld\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755035 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-client-ca\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755051 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d37a7d0d-0a12-40d2-b970-90394f2c11a8-metrics-tls\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755070 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60-metrics-tls\") pod \"dns-operator-744455d44c-4kx7t\" (UID: \"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60\") " pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755090 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-etcd-client\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755109 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755652 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3541b892-0b8c-4319-b4fa-b4d34cca9e18-config\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755835 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-image-import-ca\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.755860 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.756284 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.756702 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-audit-policies\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.756769 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7dac1b70-3636-47c4-be0b-a798cb11a6e7-node-pullsecrets\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.756775 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-config\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.757282 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-audit\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.757293 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.757686 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.757752 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2679bc9-2b4f-4c24-9625-447a63d8ac59-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.758378 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-trusted-ca-bundle\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.758610 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-client-ca\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759343 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-trusted-ca-bundle\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759404 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a2802700-9f5c-4987-905b-625784a96a37-auth-proxy-config\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759429 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7220a624-ba75-4a01-9bcc-71450af1e114-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759476 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cdvx\" (UniqueName: \"kubernetes.io/projected/31d139be-8c3b-4ff9-9e9d-872906e7a547-kube-api-access-6cdvx\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759496 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/28425929-c82a-4d16-a278-1cf2786276e5-etcd-client\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759512 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3440a79c-9036-4595-9967-ee9e5ae118c9-trusted-ca\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759530 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759587 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759607 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759646 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60243fa4-404c-47cc-b17e-0a338bb89b54-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759670 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7220a624-ba75-4a01-9bcc-71450af1e114-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.759686 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-serving-cert\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760002 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a2802700-9f5c-4987-905b-625784a96a37-auth-proxy-config\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760127 4652 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60243fa4-404c-47cc-b17e-0a338bb89b54-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760155 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47njp\" (UniqueName: \"kubernetes.io/projected/d2679bc9-2b4f-4c24-9625-447a63d8ac59-kube-api-access-47njp\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760171 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b251475-c037-4453-8f24-406781fabc44-serving-cert\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760219 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3541b892-0b8c-4319-b4fa-b4d34cca9e18-images\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760238 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d2679bc9-2b4f-4c24-9625-447a63d8ac59-audit-dir\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760268 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-available-featuregates\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760317 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3440a79c-9036-4595-9967-ee9e5ae118c9-config\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760336 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-etcd-client\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760383 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c435a822-7efb-4d40-b374-9523f145f48a-serving-cert\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: 
\"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760401 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760416 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31d139be-8c3b-4ff9-9e9d-872906e7a547-serving-cert\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760433 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3541b892-0b8c-4319-b4fa-b4d34cca9e18-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760470 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf45j\" (UniqueName: \"kubernetes.io/projected/15cb8768-99e0-4907-af4d-0167fff40d3f-kube-api-access-vf45j\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760488 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-etcd-ca\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760506 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760522 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvn9p\" (UniqueName: \"kubernetes.io/projected/7220a624-ba75-4a01-9bcc-71450af1e114-kube-api-access-nvn9p\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760585 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfdzz\" (UniqueName: \"kubernetes.io/projected/cc547d04-f064-455f-b8d3-1d5627a82ca4-kube-api-access-qfdzz\") pod \"multus-admission-controller-857f4d67dd-fjbx8\" (UID: \"cc547d04-f064-455f-b8d3-1d5627a82ca4\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760609 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-client-ca\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760627 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/41f2faf0-6ed1-4043-8687-13dd1ef0788b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4z8lw\" (UID: \"41f2faf0-6ed1-4043-8687-13dd1ef0788b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760644 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxtm8\" (UniqueName: \"kubernetes.io/projected/60243fa4-404c-47cc-b17e-0a338bb89b54-kube-api-access-wxtm8\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760676 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zk2r\" (UniqueName: \"kubernetes.io/projected/a2802700-9f5c-4987-905b-625784a96a37-kube-api-access-8zk2r\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760701 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-dir\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760717 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7220a624-ba75-4a01-9bcc-71450af1e114-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760757 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cc547d04-f064-455f-b8d3-1d5627a82ca4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fjbx8\" (UID: \"cc547d04-f064-455f-b8d3-1d5627a82ca4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760777 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc 
kubenswrapper[4652]: I1205 05:29:01.760794 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760828 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtp4q\" (UniqueName: \"kubernetes.io/projected/e26e158c-7bd6-45bd-be06-8cbe6c1d4912-kube-api-access-mtp4q\") pod \"downloads-7954f5f757-7lwtl\" (UID: \"e26e158c-7bd6-45bd-be06-8cbe6c1d4912\") " pod="openshift-console/downloads-7954f5f757-7lwtl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760827 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3440a79c-9036-4595-9967-ee9e5ae118c9-serving-cert\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760853 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-etcd-service-ca\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760857 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760905 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/a2802700-9f5c-4987-905b-625784a96a37-machine-approver-tls\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760942 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv552\" (UniqueName: \"kubernetes.io/projected/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-kube-api-access-vv552\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760977 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-policies\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760995 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/28425929-c82a-4d16-a278-1cf2786276e5-serving-cert\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761016 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d37a7d0d-0a12-40d2-b970-90394f2c11a8-trusted-ca\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761035 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-service-ca\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761053 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-oauth-serving-cert\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761075 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761089 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-config\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761111 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jmsk\" (UniqueName: \"kubernetes.io/projected/28425929-c82a-4d16-a278-1cf2786276e5-kube-api-access-9jmsk\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761129 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-encryption-config\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761148 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7dac1b70-3636-47c4-be0b-a798cb11a6e7-audit-dir\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761163 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-oauth-config\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761184 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-config\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761215 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-service-ca-bundle\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761233 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762267 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-encryption-config\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762295 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-config\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762387 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2802700-9f5c-4987-905b-625784a96a37-config\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762505 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3440a79c-9036-4595-9967-ee9e5ae118c9-trusted-ca\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761978 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-oauth-serving-cert\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " 
pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762048 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-etcd-ca\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762646 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7220a624-ba75-4a01-9bcc-71450af1e114-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762747 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-serving-cert\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762907 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-etcd-service-ca\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.762962 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763023 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-etcd-serving-ca\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763077 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-config\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763129 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gfb8\" (UniqueName: \"kubernetes.io/projected/6b251475-c037-4453-8f24-406781fabc44-kube-api-access-8gfb8\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763150 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-serving-cert\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763164 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7220a624-ba75-4a01-9bcc-71450af1e114-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763215 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-available-featuregates\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.760579 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60-metrics-tls\") pod \"dns-operator-744455d44c-4kx7t\" (UID: \"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60\") " pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763742 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28425929-c82a-4d16-a278-1cf2786276e5-config\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763855 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-client-ca\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.763882 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-serving-cert\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.764463 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7dac1b70-3636-47c4-be0b-a798cb11a6e7-audit-dir\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.764545 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-serving-cert\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761601 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-dir\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.764743 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.765489 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31d139be-8c3b-4ff9-9e9d-872906e7a547-serving-cert\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.765517 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3440a79c-9036-4595-9967-ee9e5ae118c9-config\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761384 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-etcd-client\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761916 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-policies\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.765725 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-config\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.765853 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.766054 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.766057 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7dac1b70-3636-47c4-be0b-a798cb11a6e7-etcd-serving-ca\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.761443 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.766128 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2802700-9f5c-4987-905b-625784a96a37-config\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.766591 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-service-ca\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.766696 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-service-ca-bundle\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.766777 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d2679bc9-2b4f-4c24-9625-447a63d8ac59-audit-dir\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.766876 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c435a822-7efb-4d40-b374-9523f145f48a-config\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.767048 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3541b892-0b8c-4319-b4fa-b4d34cca9e18-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.767134 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.767189 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/28425929-c82a-4d16-a278-1cf2786276e5-etcd-client\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.767327 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3541b892-0b8c-4319-b4fa-b4d34cca9e18-images\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.768148 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-encryption-config\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.768300 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7dac1b70-3636-47c4-be0b-a798cb11a6e7-encryption-config\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.768494 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.768507 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.768513 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/41f2faf0-6ed1-4043-8687-13dd1ef0788b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4z8lw\" (UID: \"41f2faf0-6ed1-4043-8687-13dd1ef0788b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.768982 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-config\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:01 crc 
kubenswrapper[4652]: I1205 05:29:01.768987 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/a2802700-9f5c-4987-905b-625784a96a37-machine-approver-tls\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.769424 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.769709 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-serving-cert\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.769869 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b251475-c037-4453-8f24-406781fabc44-serving-cert\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.769990 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d2679bc9-2b4f-4c24-9625-447a63d8ac59-etcd-client\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.770932 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-oauth-config\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.771482 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c435a822-7efb-4d40-b374-9523f145f48a-serving-cert\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.771538 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-config\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.772618 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.777126 4652 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["hostpath-provisioner/csi-hostpathplugin-26kkh"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.777669 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/28425929-c82a-4d16-a278-1cf2786276e5-serving-cert\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.778358 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.778763 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.784398 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-26kkh"] Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.792801 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.804080 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/cc547d04-f064-455f-b8d3-1d5627a82ca4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fjbx8\" (UID: \"cc547d04-f064-455f-b8d3-1d5627a82ca4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.812310 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.831800 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.851295 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.860503 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d37a7d0d-0a12-40d2-b970-90394f2c11a8-metrics-tls\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.871456 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.897567 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.906485 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d37a7d0d-0a12-40d2-b970-90394f2c11a8-trusted-ca\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 
05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.912056 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.931382 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.952340 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.971669 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.975178 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60243fa4-404c-47cc-b17e-0a338bb89b54-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:01 crc kubenswrapper[4652]: I1205 05:29:01.991698 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.001749 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60243fa4-404c-47cc-b17e-0a338bb89b54-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.011610 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.072427 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.091718 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.112443 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.132629 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.151257 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.172367 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.192105 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.212281 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 05:29:02 crc 
kubenswrapper[4652]: I1205 05:29:02.231352 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.252304 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.272185 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.291576 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.312240 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.331664 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.351573 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.372133 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.392096 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.411689 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.432222 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.451970 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.471497 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.491342 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.511899 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.531656 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.551978 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.572081 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.592544 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 
05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.611829 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.631901 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.651493 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.670638 4652 request.go:700] Waited for 1.018710008s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.671453 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.691449 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.712029 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.731896 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.751646 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.772551 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.791425 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.812290 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.831634 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.851478 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.872389 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.891720 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.911361 4652 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.932090 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.951973 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.971930 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.974246 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:02 crc kubenswrapper[4652]: E1205 05:29:02.974490 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:31:04.974457603 +0000 UTC m=+267.211187870 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.974601 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.974679 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.975411 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.978068 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:29:02 crc kubenswrapper[4652]: I1205 05:29:02.992201 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.011660 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.032712 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.052446 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.071961 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.075309 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.075383 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.078893 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.079120 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.092322 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.111341 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.131727 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 
05:29:03.151746 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.171456 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.192952 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.212156 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.232263 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.236439 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.242853 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.245734 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.251867 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.280144 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.291834 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.311743 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.332507 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.352310 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.372354 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.393894 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.412080 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.432064 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.451920 4652 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.472689 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.492410 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.511497 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.532349 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.566887 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4jfj\" (UniqueName: \"kubernetes.io/projected/7dac1b70-3636-47c4-be0b-a798cb11a6e7-kube-api-access-s4jfj\") pod \"apiserver-76f77b778f-hb9pn\" (UID: \"7dac1b70-3636-47c4-be0b-a798cb11a6e7\") " pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.582958 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8l5f\" (UniqueName: \"kubernetes.io/projected/8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60-kube-api-access-v8l5f\") pod \"dns-operator-744455d44c-4kx7t\" (UID: \"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60\") " pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.603985 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn7r6\" (UniqueName: \"kubernetes.io/projected/3440a79c-9036-4595-9967-ee9e5ae118c9-kube-api-access-kn7r6\") pod \"console-operator-58897d9998-mvtf6\" (UID: \"3440a79c-9036-4595-9967-ee9e5ae118c9\") " pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.625220 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw8ld\" (UniqueName: \"kubernetes.io/projected/c435a822-7efb-4d40-b374-9523f145f48a-kube-api-access-tw8ld\") pod \"authentication-operator-69f744f599-gdjlr\" (UID: \"c435a822-7efb-4d40-b374-9523f145f48a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:03 crc kubenswrapper[4652]: W1205 05:29:03.631538 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-c81c04535af7a105cbd25e165d1ea7205aeeef1f2131d0cc44459dcd6909a1b5 WatchSource:0}: Error finding container c81c04535af7a105cbd25e165d1ea7205aeeef1f2131d0cc44459dcd6909a1b5: Status 404 returned error can't find the container with id c81c04535af7a105cbd25e165d1ea7205aeeef1f2131d0cc44459dcd6909a1b5 Dec 05 05:29:03 crc kubenswrapper[4652]: W1205 05:29:03.637168 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-69e6c9f31ffdd835d98edbd0057d72006aa7f8e0925852fc6686045fbc4e1c7e WatchSource:0}: Error finding container 69e6c9f31ffdd835d98edbd0057d72006aa7f8e0925852fc6686045fbc4e1c7e: Status 404 returned error can't find the container with id 
69e6c9f31ffdd835d98edbd0057d72006aa7f8e0925852fc6686045fbc4e1c7e Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.646812 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7frp\" (UniqueName: \"kubernetes.io/projected/41f2faf0-6ed1-4043-8687-13dd1ef0788b-kube-api-access-t7frp\") pod \"cluster-samples-operator-665b6dd947-4z8lw\" (UID: \"41f2faf0-6ed1-4043-8687-13dd1ef0788b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.665454 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d37a7d0d-0a12-40d2-b970-90394f2c11a8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.673278 4652 request.go:700] Waited for 1.91593641s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/serviceaccounts/machine-api-operator/token Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.675092 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"69e6c9f31ffdd835d98edbd0057d72006aa7f8e0925852fc6686045fbc4e1c7e"} Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.677122 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0642eb723b6a5d7b2d473d9098355cf1f8323ac12de908bfbf82fcdb3d6530ea"} Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.677165 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c868c601c13e750738102da4cff30d64a19ef02e95c8aa0bafc02cae0e853fb1"} Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.678542 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c81c04535af7a105cbd25e165d1ea7205aeeef1f2131d0cc44459dcd6909a1b5"} Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.686547 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmwqd\" (UniqueName: \"kubernetes.io/projected/3541b892-0b8c-4319-b4fa-b4d34cca9e18-kube-api-access-mmwqd\") pod \"machine-api-operator-5694c8668f-6cfcs\" (UID: \"3541b892-0b8c-4319-b4fa-b4d34cca9e18\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.688501 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.699360 4652 util.go:30] "No sandbox for pod can be found. 
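
[annotation] The request.go:700 "Waited for 1.91593641s due to client-side throttling, not priority and fairness" record above is client-go's own token-bucket limiter delaying a POST before it ever reaches the server; it is unrelated to server-side API Priority and Fairness. A hedged sketch of where that knob lives for an ordinary client; the QPS/Burst values are illustrative assumptions, not the kubelet's configuration:

    // throttle.go - sketch: raising the client-side rate limits that produce
    // "Waited for ... due to client-side throttling" when exceeded.
    package main

    import (
        "fmt"

        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        // Historic client-go defaults are low (QPS 5 / Burst 10); requests
        // beyond the bucket sleep, and long waits are logged as above.
        cfg.QPS = 50    // illustrative value
        cfg.Burst = 100 // illustrative value
        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        fmt.Println("client ready:", client != nil)
    }
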
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.703253 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-267rl\" (UniqueName: \"kubernetes.io/projected/d37a7d0d-0a12-40d2-b970-90394f2c11a8-kube-api-access-267rl\") pod \"ingress-operator-5b745b69d9-dtnxw\" (UID: \"d37a7d0d-0a12-40d2-b970-90394f2c11a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.724524 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk2tq\" (UniqueName: \"kubernetes.io/projected/f4bd4318-0406-40e2-8b50-b79c312bb10a-kube-api-access-kk2tq\") pod \"console-f9d7485db-pjnkl\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.732898 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.746763 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cdvx\" (UniqueName: \"kubernetes.io/projected/31d139be-8c3b-4ff9-9e9d-872906e7a547-kube-api-access-6cdvx\") pod \"controller-manager-879f6c89f-5mktp\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.753798 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.766151 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf45j\" (UniqueName: \"kubernetes.io/projected/15cb8768-99e0-4907-af4d-0167fff40d3f-kube-api-access-vf45j\") pod \"oauth-openshift-558db77b4-pslqt\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.766912 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.787636 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7220a624-ba75-4a01-9bcc-71450af1e114-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.806520 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxtm8\" (UniqueName: \"kubernetes.io/projected/60243fa4-404c-47cc-b17e-0a338bb89b54-kube-api-access-wxtm8\") pod \"openshift-apiserver-operator-796bbdcf4f-jgztd\" (UID: \"60243fa4-404c-47cc-b17e-0a338bb89b54\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.827067 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zk2r\" (UniqueName: \"kubernetes.io/projected/a2802700-9f5c-4987-905b-625784a96a37-kube-api-access-8zk2r\") pod \"machine-approver-56656f9798-lp26s\" (UID: \"a2802700-9f5c-4987-905b-625784a96a37\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.828909 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.838214 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.846243 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6cfcs"] Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.850830 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.853998 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv552\" (UniqueName: \"kubernetes.io/projected/f8b5481d-4d3c-47e4-8e9d-c3e70ed49726-kube-api-access-vv552\") pod \"openshift-config-operator-7777fb866f-9nh89\" (UID: \"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.864654 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47njp\" (UniqueName: \"kubernetes.io/projected/d2679bc9-2b4f-4c24-9625-447a63d8ac59-kube-api-access-47njp\") pod \"apiserver-7bbb656c7d-6qfmb\" (UID: \"d2679bc9-2b4f-4c24-9625-447a63d8ac59\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.881256 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gdjlr"] Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.884536 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvn9p\" (UniqueName: \"kubernetes.io/projected/7220a624-ba75-4a01-9bcc-71450af1e114-kube-api-access-nvn9p\") pod \"cluster-image-registry-operator-dc59b4c8b-bm6ft\" (UID: \"7220a624-ba75-4a01-9bcc-71450af1e114\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:03 crc kubenswrapper[4652]: W1205 05:29:03.890708 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc435a822_7efb_4d40_b374_9523f145f48a.slice/crio-a0aa20a54d9eebaa5ea2da00cbe0238c6a1cced2a0d24803e7678133fc0e992d WatchSource:0}: Error finding container a0aa20a54d9eebaa5ea2da00cbe0238c6a1cced2a0d24803e7678133fc0e992d: Status 404 returned error can't find the container with id a0aa20a54d9eebaa5ea2da00cbe0238c6a1cced2a0d24803e7678133fc0e992d Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.894414 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.903762 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfdzz\" (UniqueName: \"kubernetes.io/projected/cc547d04-f064-455f-b8d3-1d5627a82ca4-kube-api-access-qfdzz\") pod \"multus-admission-controller-857f4d67dd-fjbx8\" (UID: \"cc547d04-f064-455f-b8d3-1d5627a82ca4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.920157 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.925180 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.929930 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jmsk\" (UniqueName: \"kubernetes.io/projected/28425929-c82a-4d16-a278-1cf2786276e5-kube-api-access-9jmsk\") pod \"etcd-operator-b45778765-5d42v\" (UID: \"28425929-c82a-4d16-a278-1cf2786276e5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.932812 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.945045 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.955292 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hb9pn"] Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.956742 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gfb8\" (UniqueName: \"kubernetes.io/projected/6b251475-c037-4453-8f24-406781fabc44-kube-api-access-8gfb8\") pod \"route-controller-manager-6576b87f9c-9lzgg\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.978021 4652 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.986240 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtp4q\" (UniqueName: \"kubernetes.io/projected/e26e158c-7bd6-45bd-be06-8cbe6c1d4912-kube-api-access-mtp4q\") pod \"downloads-7954f5f757-7lwtl\" (UID: \"e26e158c-7bd6-45bd-be06-8cbe6c1d4912\") " pod="openshift-console/downloads-7954f5f757-7lwtl" Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.997754 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw"] Dec 05 05:29:03 crc kubenswrapper[4652]: I1205 05:29:03.997902 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.014171 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.029633 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.057157 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.062543 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-pjnkl"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.085833 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.086030 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-tls\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.086088 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.086113 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.086168 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-trusted-ca\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.086188 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnn4j\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-kube-api-access-mnn4j\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.086218 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-certificates\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.086285 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-bound-sa-token\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.086771 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:04.586754712 +0000 UTC m=+146.823484979 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: W1205 05:29:04.093737 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4bd4318_0406_40e2_8b50_b79c312bb10a.slice/crio-598e5af9201d9ee89551dc66cae596f3368ffab8ca016b9d0d36810283a61f8a WatchSource:0}: Error finding container 598e5af9201d9ee89551dc66cae596f3368ffab8ca016b9d0d36810283a61f8a: Status 404 returned error can't find the container with id 598e5af9201d9ee89551dc66cae596f3368ffab8ca016b9d0d36810283a61f8a Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.098458 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.101311 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4kx7t"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.105865 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.150182 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.150501 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.157271 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-7lwtl" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187141 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187375 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-serving-cert\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187404 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187435 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k57z2\" (UniqueName: \"kubernetes.io/projected/d3688b95-25a0-4f1c-87c7-dfb4d913a451-kube-api-access-k57z2\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187458 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c118e33-8f43-4a93-9fba-c4562daa381b-config-volume\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187510 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d8a4eb0-eb4c-4b60-b001-9aa831333db8-cert\") pod \"ingress-canary-jwzzn\" (UID: \"5d8a4eb0-eb4c-4b60-b001-9aa831333db8\") " pod="openshift-ingress-canary/ingress-canary-jwzzn" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187527 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzctx\" (UniqueName: \"kubernetes.io/projected/939979d1-277a-4157-bc3a-8f765bd38e45-kube-api-access-bzctx\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187547 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-config\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc 
kubenswrapper[4652]: I1205 05:29:04.187584 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-signing-cabundle\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: \"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187634 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a5a271e-1e58-4533-8c2a-d06653d75f2c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: \"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187726 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a2ed5e38-62e9-493a-8784-278a6d52fb2a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187781 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndkhk\" (UniqueName: \"kubernetes.io/projected/d8b9d5f4-186a-4646-ab32-0f3c63e23676-kube-api-access-ndkhk\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187820 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wq4t\" (UniqueName: \"kubernetes.io/projected/9c118e33-8f43-4a93-9fba-c4562daa381b-kube-api-access-7wq4t\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187836 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j9jq\" (UniqueName: \"kubernetes.io/projected/7207e17a-8352-4874-8fd7-dacd6763098f-kube-api-access-2j9jq\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187861 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6dv9\" (UniqueName: \"kubernetes.io/projected/fcbda467-98bc-40e1-a749-2f7ba3c9d331-kube-api-access-n6dv9\") pod \"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187879 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187900 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187933 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/939979d1-277a-4157-bc3a-8f765bd38e45-node-bootstrap-token\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.187982 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dltl\" (UniqueName: \"kubernetes.io/projected/f9db05ec-e04b-409c-9523-4b102664127d-kube-api-access-9dltl\") pod \"migrator-59844c95c7-dz64t\" (UID: \"f9db05ec-e04b-409c-9523-4b102664127d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188002 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-mountpoint-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188018 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ed5e38-62e9-493a-8784-278a6d52fb2a-config\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188036 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fc94g\" (UID: \"cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188053 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3d5f918e-f713-40d2-9a2b-447c1aff6963-tmpfs\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188133 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/05106619-f9ca-428e-9e93-30c9bc9c2218-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: 
\"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188162 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-trusted-ca\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188179 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fftrc\" (UniqueName: \"kubernetes.io/projected/cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6-kube-api-access-fftrc\") pod \"control-plane-machine-set-operator-78cbb6b69f-fc94g\" (UID: \"cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188206 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-metrics-certs\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188225 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5c553f4a-55fd-47f8-9c30-1cd4da3c70d8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cgndz\" (UID: \"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188245 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhvqv\" (UniqueName: \"kubernetes.io/projected/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-kube-api-access-fhvqv\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188261 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-plugins-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188281 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8vmk\" (UniqueName: \"kubernetes.io/projected/2af0519d-fd20-46ec-a6d6-4c2532918731-kube-api-access-l8vmk\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188297 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-stats-auth\") pod 
\"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188315 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9kwm\" (UniqueName: \"kubernetes.io/projected/4a5a271e-1e58-4533-8c2a-d06653d75f2c-kube-api-access-c9kwm\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: \"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188341 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-socket-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188377 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-certificates\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188394 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-csi-data-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188410 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdk4n\" (UniqueName: \"kubernetes.io/projected/3d5f918e-f713-40d2-9a2b-447c1aff6963-kube-api-access-kdk4n\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188443 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7207e17a-8352-4874-8fd7-dacd6763098f-metrics-tls\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188459 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c118e33-8f43-4a93-9fba-c4562daa381b-secret-volume\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188475 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-default-certificate\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " 
pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188523 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188591 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-signing-key\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: \"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188658 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-service-ca-bundle\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188702 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188722 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3d5f918e-f713-40d2-9a2b-447c1aff6963-apiservice-cert\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.188764 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a5a271e-1e58-4533-8c2a-d06653d75f2c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: \"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.190217 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c7bl\" (UniqueName: \"kubernetes.io/projected/5d8a4eb0-eb4c-4b60-b001-9aa831333db8-kube-api-access-7c7bl\") pod \"ingress-canary-jwzzn\" (UID: \"5d8a4eb0-eb4c-4b60-b001-9aa831333db8\") " pod="openshift-ingress-canary/ingress-canary-jwzzn" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.190246 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d3688b95-25a0-4f1c-87c7-dfb4d913a451-srv-cert\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.190280 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.190298 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d559938-4b8d-4e83-9ec0-66a384fc5842-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.193010 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.195827 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7207e17a-8352-4874-8fd7-dacd6763098f-config-volume\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.195916 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-bound-sa-token\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.195950 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64l74\" (UniqueName: \"kubernetes.io/projected/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-kube-api-access-64l74\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.195987 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/939979d1-277a-4157-bc3a-8f765bd38e45-certs\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.196078 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kz2s\" (UniqueName: \"kubernetes.io/projected/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-kube-api-access-2kz2s\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.196125 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2ed5e38-62e9-493a-8784-278a6d52fb2a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.196144 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gcwz\" (UniqueName: \"kubernetes.io/projected/5c553f4a-55fd-47f8-9c30-1cd4da3c70d8-kube-api-access-4gcwz\") pod \"package-server-manager-789f6589d5-cgndz\" (UID: \"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.196187 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fcbda467-98bc-40e1-a749-2f7ba3c9d331-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.196226 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-registration-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.199071 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-certificates\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200367 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-tls\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200408 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-proxy-tls\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200437 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200463 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d559938-4b8d-4e83-9ec0-66a384fc5842-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200488 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d5f918e-f713-40d2-9a2b-447c1aff6963-webhook-cert\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200511 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szwzp\" (UniqueName: \"kubernetes.io/projected/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-kube-api-access-szwzp\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: \"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200918 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200954 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czjrq\" (UniqueName: \"kubernetes.io/projected/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-kube-api-access-czjrq\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.200979 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d3688b95-25a0-4f1c-87c7-dfb4d913a451-profile-collector-cert\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.201026 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:04.700992436 +0000 UTC m=+146.937722703 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.201069 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/05106619-f9ca-428e-9e93-30c9bc9c2218-proxy-tls\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.201113 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.201252 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-config\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.201309 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/05106619-f9ca-428e-9e93-30c9bc9c2218-images\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.201336 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d559938-4b8d-4e83-9ec0-66a384fc5842-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.201360 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fcbda467-98bc-40e1-a749-2f7ba3c9d331-srv-cert\") pod \"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.201431 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnn4j\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-kube-api-access-mnn4j\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" 
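
[annotation] The MountVolume.MountDevice and UnmountVolume.TearDown errors above share one cause: the kubevirt.io.hostpath-provisioner CSI plugin had not yet registered with this kubelet, so both operations fail and are retried with backoff (durationBeforeRetry 500ms). Registration state is visible in the node's CSINode object; a hedged sketch of checking it, assuming a default kubeconfig and using the node name "crc" from the records above:

    // csicheck.go - sketch: list the CSI drivers registered on a node.
    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)

        // "crc" is the node name taken from the log's hostname field.
        csiNode, err := client.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
        if err != nil {
            panic(err)
        }
        // Once the hostpath plugin registers it appears here, and the
        // kubelet's retries against this volume start succeeding.
        for _, d := range csiNode.Spec.Drivers {
            fmt.Println("registered CSI driver:", d.Name)
        }
    }
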
Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.201456 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5znz9\" (UniqueName: \"kubernetes.io/projected/05106619-f9ca-428e-9e93-30c9bc9c2218-kube-api-access-5znz9\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.203494 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.207356 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-trusted-ca\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.228007 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-tls\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.231230 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.236015 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-bound-sa-token\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.246462 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnn4j\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-kube-api-access-mnn4j\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302065 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/939979d1-277a-4157-bc3a-8f765bd38e45-node-bootstrap-token\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302112 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-mountpoint-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: 
I1205 05:29:04.302132 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dltl\" (UniqueName: \"kubernetes.io/projected/f9db05ec-e04b-409c-9523-4b102664127d-kube-api-access-9dltl\") pod \"migrator-59844c95c7-dz64t\" (UID: \"f9db05ec-e04b-409c-9523-4b102664127d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302157 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ed5e38-62e9-493a-8784-278a6d52fb2a-config\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302178 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3d5f918e-f713-40d2-9a2b-447c1aff6963-tmpfs\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302214 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fc94g\" (UID: \"cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302236 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/05106619-f9ca-428e-9e93-30c9bc9c2218-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302253 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fftrc\" (UniqueName: \"kubernetes.io/projected/cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6-kube-api-access-fftrc\") pod \"control-plane-machine-set-operator-78cbb6b69f-fc94g\" (UID: \"cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302271 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5c553f4a-55fd-47f8-9c30-1cd4da3c70d8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cgndz\" (UID: \"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302290 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhvqv\" (UniqueName: \"kubernetes.io/projected/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-kube-api-access-fhvqv\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302308 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-metrics-certs\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302323 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8vmk\" (UniqueName: \"kubernetes.io/projected/2af0519d-fd20-46ec-a6d6-4c2532918731-kube-api-access-l8vmk\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302337 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-stats-auth\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302362 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9kwm\" (UniqueName: \"kubernetes.io/projected/4a5a271e-1e58-4533-8c2a-d06653d75f2c-kube-api-access-c9kwm\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: \"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302378 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-plugins-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302393 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-socket-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302410 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-csi-data-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302428 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdk4n\" (UniqueName: \"kubernetes.io/projected/3d5f918e-f713-40d2-9a2b-447c1aff6963-kube-api-access-kdk4n\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302446 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/7207e17a-8352-4874-8fd7-dacd6763098f-metrics-tls\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302462 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-default-certificate\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302480 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c118e33-8f43-4a93-9fba-c4562daa381b-secret-volume\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302497 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302513 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-signing-key\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: \"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302527 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-service-ca-bundle\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.302543 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303547 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3d5f918e-f713-40d2-9a2b-447c1aff6963-apiservice-cert\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303596 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a5a271e-1e58-4533-8c2a-d06653d75f2c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: \"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303619 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c7bl\" (UniqueName: \"kubernetes.io/projected/5d8a4eb0-eb4c-4b60-b001-9aa831333db8-kube-api-access-7c7bl\") pod \"ingress-canary-jwzzn\" (UID: \"5d8a4eb0-eb4c-4b60-b001-9aa831333db8\") " pod="openshift-ingress-canary/ingress-canary-jwzzn" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303642 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d3688b95-25a0-4f1c-87c7-dfb4d913a451-srv-cert\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303667 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d559938-4b8d-4e83-9ec0-66a384fc5842-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303685 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303701 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7207e17a-8352-4874-8fd7-dacd6763098f-config-volume\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303716 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64l74\" (UniqueName: \"kubernetes.io/projected/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-kube-api-access-64l74\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303732 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/939979d1-277a-4157-bc3a-8f765bd38e45-certs\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303748 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kz2s\" (UniqueName: \"kubernetes.io/projected/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-kube-api-access-2kz2s\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303772 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gcwz\" (UniqueName: \"kubernetes.io/projected/5c553f4a-55fd-47f8-9c30-1cd4da3c70d8-kube-api-access-4gcwz\") pod \"package-server-manager-789f6589d5-cgndz\" (UID: \"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303792 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2ed5e38-62e9-493a-8784-278a6d52fb2a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303807 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-registration-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303826 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fcbda467-98bc-40e1-a749-2f7ba3c9d331-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303848 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-proxy-tls\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303863 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303880 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d559938-4b8d-4e83-9ec0-66a384fc5842-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303897 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d5f918e-f713-40d2-9a2b-447c1aff6963-webhook-cert\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303915 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szwzp\" 
(UniqueName: \"kubernetes.io/projected/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-kube-api-access-szwzp\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: \"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303939 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303959 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czjrq\" (UniqueName: \"kubernetes.io/projected/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-kube-api-access-czjrq\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303976 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d3688b95-25a0-4f1c-87c7-dfb4d913a451-profile-collector-cert\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.303995 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304012 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/05106619-f9ca-428e-9e93-30c9bc9c2218-proxy-tls\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304035 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-config\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304050 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/05106619-f9ca-428e-9e93-30c9bc9c2218-images\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304072 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fcbda467-98bc-40e1-a749-2f7ba3c9d331-srv-cert\") pod 
\"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304088 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d559938-4b8d-4e83-9ec0-66a384fc5842-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304104 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5znz9\" (UniqueName: \"kubernetes.io/projected/05106619-f9ca-428e-9e93-30c9bc9c2218-kube-api-access-5znz9\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304121 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304136 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-serving-cert\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304153 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c118e33-8f43-4a93-9fba-c4562daa381b-config-volume\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304169 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k57z2\" (UniqueName: \"kubernetes.io/projected/d3688b95-25a0-4f1c-87c7-dfb4d913a451-kube-api-access-k57z2\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304183 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d8a4eb0-eb4c-4b60-b001-9aa831333db8-cert\") pod \"ingress-canary-jwzzn\" (UID: \"5d8a4eb0-eb4c-4b60-b001-9aa831333db8\") " pod="openshift-ingress-canary/ingress-canary-jwzzn" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304210 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzctx\" (UniqueName: \"kubernetes.io/projected/939979d1-277a-4157-bc3a-8f765bd38e45-kube-api-access-bzctx\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " 
pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304229 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-config\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304244 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-signing-cabundle\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: \"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304260 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a5a271e-1e58-4533-8c2a-d06653d75f2c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: \"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304281 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a2ed5e38-62e9-493a-8784-278a6d52fb2a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304300 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndkhk\" (UniqueName: \"kubernetes.io/projected/d8b9d5f4-186a-4646-ab32-0f3c63e23676-kube-api-access-ndkhk\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304317 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j9jq\" (UniqueName: \"kubernetes.io/projected/7207e17a-8352-4874-8fd7-dacd6763098f-kube-api-access-2j9jq\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304335 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wq4t\" (UniqueName: \"kubernetes.io/projected/9c118e33-8f43-4a93-9fba-c4562daa381b-kube-api-access-7wq4t\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.304351 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 
05:29:04.304367 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6dv9\" (UniqueName: \"kubernetes.io/projected/fcbda467-98bc-40e1-a749-2f7ba3c9d331-kube-api-access-n6dv9\") pod \"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.305078 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-config\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.305187 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/05106619-f9ca-428e-9e93-30c9bc9c2218-images\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.305249 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7207e17a-8352-4874-8fd7-dacd6763098f-config-volume\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.306493 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d559938-4b8d-4e83-9ec0-66a384fc5842-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.308847 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.309486 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-mountpoint-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.310609 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-config\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.310802 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-signing-cabundle\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: 
\"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.311457 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2ed5e38-62e9-493a-8784-278a6d52fb2a-config\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.311597 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3d5f918e-f713-40d2-9a2b-447c1aff6963-tmpfs\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.311925 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:04.811911214 +0000 UTC m=+147.048641481 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.312275 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c118e33-8f43-4a93-9fba-c4562daa381b-config-volume\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.313139 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-registration-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.313304 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/05106619-f9ca-428e-9e93-30c9bc9c2218-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.314848 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.316358 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" 
(UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-plugins-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.316613 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.317329 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/939979d1-277a-4157-bc3a-8f765bd38e45-certs\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.317761 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3d5f918e-f713-40d2-9a2b-447c1aff6963-webhook-cert\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.318069 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a5a271e-1e58-4533-8c2a-d06653d75f2c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: \"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.318630 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fcbda467-98bc-40e1-a749-2f7ba3c9d331-srv-cert\") pod \"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.318673 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/939979d1-277a-4157-bc3a-8f765bd38e45-node-bootstrap-token\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.319526 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a2ed5e38-62e9-493a-8784-278a6d52fb2a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.322910 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5c553f4a-55fd-47f8-9c30-1cd4da3c70d8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cgndz\" (UID: \"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.323082 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fc94g\" (UID: \"cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.323082 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-stats-auth\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.323417 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-default-certificate\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.323456 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-serving-cert\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.323516 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d559938-4b8d-4e83-9ec0-66a384fc5842-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.323770 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fcbda467-98bc-40e1-a749-2f7ba3c9d331-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.327858 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-proxy-tls\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.330671 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3d5f918e-f713-40d2-9a2b-447c1aff6963-apiservice-cert\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.334466 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d8a4eb0-eb4c-4b60-b001-9aa831333db8-cert\") pod \"ingress-canary-jwzzn\" (UID: \"5d8a4eb0-eb4c-4b60-b001-9aa831333db8\") " pod="openshift-ingress-canary/ingress-canary-jwzzn" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.337120 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d3688b95-25a0-4f1c-87c7-dfb4d913a451-profile-collector-cert\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.337345 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-socket-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.337410 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2af0519d-fd20-46ec-a6d6-4c2532918731-csi-data-dir\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.340213 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7207e17a-8352-4874-8fd7-dacd6763098f-metrics-tls\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.340815 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-service-ca-bundle\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.345358 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-metrics-certs\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.345431 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pslqt"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.345923 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.346056 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a5a271e-1e58-4533-8c2a-d06653d75f2c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: 
\"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.347864 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.347988 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c118e33-8f43-4a93-9fba-c4562daa381b-secret-volume\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.349088 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.351591 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-signing-key\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: \"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.351634 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/05106619-f9ca-428e-9e93-30c9bc9c2218-proxy-tls\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.351811 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d3688b95-25a0-4f1c-87c7-dfb4d913a451-srv-cert\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.352085 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6dv9\" (UniqueName: \"kubernetes.io/projected/fcbda467-98bc-40e1-a749-2f7ba3c9d331-kube-api-access-n6dv9\") pod \"olm-operator-6b444d44fb-4kzzs\" (UID: \"fcbda467-98bc-40e1-a749-2f7ba3c9d331\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: W1205 05:29:04.358215 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15cb8768_99e0_4907_af4d_0167fff40d3f.slice/crio-7825faf9291764667f13c1bb76e72a4d8c6c896d27c6deb7cc96342ce0b028ef WatchSource:0}: Error finding container 7825faf9291764667f13c1bb76e72a4d8c6c896d27c6deb7cc96342ce0b028ef: Status 404 returned error can't find the container with id 
7825faf9291764667f13c1bb76e72a4d8c6c896d27c6deb7cc96342ce0b028ef Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.373472 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64l74\" (UniqueName: \"kubernetes.io/projected/f03783a0-28f4-48b5-9b19-5c9e3923e3fe-kube-api-access-64l74\") pod \"router-default-5444994796-cgsmk\" (UID: \"f03783a0-28f4-48b5-9b19-5c9e3923e3fe\") " pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.385055 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzctx\" (UniqueName: \"kubernetes.io/projected/939979d1-277a-4157-bc3a-8f765bd38e45-kube-api-access-bzctx\") pod \"machine-config-server-4fnfv\" (UID: \"939979d1-277a-4157-bc3a-8f765bd38e45\") " pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.395995 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-4fnfv" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.405342 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.405461 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5znz9\" (UniqueName: \"kubernetes.io/projected/05106619-f9ca-428e-9e93-30c9bc9c2218-kube-api-access-5znz9\") pod \"machine-config-operator-74547568cd-tjl2q\" (UID: \"05106619-f9ca-428e-9e93-30c9bc9c2218\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.405813 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:04.905784534 +0000 UTC m=+147.142514801 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.406104 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.406469 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 05:29:04.906454795 +0000 UTC m=+147.143185062 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.428917 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k57z2\" (UniqueName: \"kubernetes.io/projected/d3688b95-25a0-4f1c-87c7-dfb4d913a451-kube-api-access-k57z2\") pod \"catalog-operator-68c6474976-286n5\" (UID: \"d3688b95-25a0-4f1c-87c7-dfb4d913a451\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.443237 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a2ed5e38-62e9-493a-8784-278a6d52fb2a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xxjmg\" (UID: \"a2ed5e38-62e9-493a-8784-278a6d52fb2a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.460161 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft"] Dec 05 05:29:04 crc kubenswrapper[4652]: W1205 05:29:04.460400 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod939979d1_277a_4157_bc3a_8f765bd38e45.slice/crio-44a72c370ec9aee2b32fa8b14481e8ed0ca2b1f98234ae8b98cf6d75d4881cab WatchSource:0}: Error finding container 44a72c370ec9aee2b32fa8b14481e8ed0ca2b1f98234ae8b98cf6d75d4881cab: Status 404 returned error can't find the container with id 44a72c370ec9aee2b32fa8b14481e8ed0ca2b1f98234ae8b98cf6d75d4881cab Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.465332 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j9jq\" (UniqueName: \"kubernetes.io/projected/7207e17a-8352-4874-8fd7-dacd6763098f-kube-api-access-2j9jq\") pod \"dns-default-7qc7b\" (UID: \"7207e17a-8352-4874-8fd7-dacd6763098f\") " pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.469277 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mvtf6"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.477669 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.501098 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dltl\" (UniqueName: \"kubernetes.io/projected/f9db05ec-e04b-409c-9523-4b102664127d-kube-api-access-9dltl\") pod \"migrator-59844c95c7-dz64t\" (UID: \"f9db05ec-e04b-409c-9523-4b102664127d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.507037 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.507899 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.007863191 +0000 UTC m=+147.244593459 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.508102 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndkhk\" (UniqueName: \"kubernetes.io/projected/d8b9d5f4-186a-4646-ab32-0f3c63e23676-kube-api-access-ndkhk\") pod \"marketplace-operator-79b997595-5ksc5\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.508458 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.508788 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.008774336 +0000 UTC m=+147.245504604 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: W1205 05:29:04.526010 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3440a79c_9036_4595_9967_ee9e5ae118c9.slice/crio-5cb23beefa5c1b892b43f2258a7e46bd30ca2fbcbad669a78732b101e62dc7f0 WatchSource:0}: Error finding container 5cb23beefa5c1b892b43f2258a7e46bd30ca2fbcbad669a78732b101e62dc7f0: Status 404 returned error can't find the container with id 5cb23beefa5c1b892b43f2258a7e46bd30ca2fbcbad669a78732b101e62dc7f0 Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.529110 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wq4t\" (UniqueName: \"kubernetes.io/projected/9c118e33-8f43-4a93-9fba-c4562daa381b-kube-api-access-7wq4t\") pod \"collect-profiles-29415195-7pk9g\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.544031 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czjrq\" (UniqueName: \"kubernetes.io/projected/5f0188c4-f23b-4b4d-89b2-b13a210ff2be-kube-api-access-czjrq\") pod \"service-ca-operator-777779d784-wbc4j\" (UID: \"5f0188c4-f23b-4b4d-89b2-b13a210ff2be\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.557672 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.570178 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szwzp\" (UniqueName: \"kubernetes.io/projected/6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8-kube-api-access-szwzp\") pod \"service-ca-9c57cc56f-6hfpm\" (UID: \"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.577274 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.590282 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.592751 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb11d23b-9cc6-4aeb-af41-fd70e19631b7-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-flpnc\" (UID: \"fb11d23b-9cc6-4aeb-af41-fd70e19631b7\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.602504 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.608113 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.609334 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.609779 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.109765698 +0000 UTC m=+147.346495965 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.613010 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kz2s\" (UniqueName: \"kubernetes.io/projected/1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5-kube-api-access-2kz2s\") pod \"machine-config-controller-84d6567774-rhtqp\" (UID: \"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.614438 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.617688 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5d42v"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.627925 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.631169 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gcwz\" (UniqueName: \"kubernetes.io/projected/5c553f4a-55fd-47f8-9c30-1cd4da3c70d8-kube-api-access-4gcwz\") pod \"package-server-manager-789f6589d5-cgndz\" (UID: \"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.635180 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.642793 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:04 crc kubenswrapper[4652]: W1205 05:29:04.643252 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28425929_c82a_4d16_a278_1cf2786276e5.slice/crio-001d10fbea144382fdb0c823b0e18b77c3881883335a8c969cc8693f688082ed WatchSource:0}: Error finding container 001d10fbea144382fdb0c823b0e18b77c3881883335a8c969cc8693f688082ed: Status 404 returned error can't find the container with id 001d10fbea144382fdb0c823b0e18b77c3881883335a8c969cc8693f688082ed Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.646213 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhvqv\" (UniqueName: \"kubernetes.io/projected/74ef5354-bf2b-41bc-ab15-25b0eaf8618a-kube-api-access-fhvqv\") pod \"openshift-controller-manager-operator-756b6f6bc6-z6p5f\" (UID: \"74ef5354-bf2b-41bc-ab15-25b0eaf8618a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.653168 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7lwtl"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.662796 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.663187 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.666771 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fftrc\" (UniqueName: \"kubernetes.io/projected/cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6-kube-api-access-fftrc\") pod \"control-plane-machine-set-operator-78cbb6b69f-fc94g\" (UID: \"cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.671279 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.681680 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.689956 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9kwm\" (UniqueName: \"kubernetes.io/projected/4a5a271e-1e58-4533-8c2a-d06653d75f2c-kube-api-access-c9kwm\") pod \"kube-storage-version-migrator-operator-b67b599dd-rlt2r\" (UID: \"4a5a271e-1e58-4533-8c2a-d06653d75f2c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.691592 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.706075 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fjbx8"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.708418 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" event={"ID":"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60","Type":"ContainerStarted","Data":"93a34937c57c321af330abb52a5ab598325d3236ac445232abe243b6667551e5"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.708476 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" event={"ID":"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60","Type":"ContainerStarted","Data":"902db95b3f67acbe947a36b8d56d3d264a6726c104c215485c5edee3b21771c0"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.711434 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.711931 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mvtf6" event={"ID":"3440a79c-9036-4595-9967-ee9e5ae118c9","Type":"ContainerStarted","Data":"d15832988ff9d49e56f9cb7fb0d606ad41d14a29850d1b518cf0496b50836c6c"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.711962 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mvtf6" event={"ID":"3440a79c-9036-4595-9967-ee9e5ae118c9","Type":"ContainerStarted","Data":"5cb23beefa5c1b892b43f2258a7e46bd30ca2fbcbad669a78732b101e62dc7f0"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.712996 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-mvtf6" Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.714003 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.213983791 +0000 UTC m=+147.450714057 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.728017 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdk4n\" (UniqueName: \"kubernetes.io/projected/3d5f918e-f713-40d2-9a2b-447c1aff6963-kube-api-access-kdk4n\") pod \"packageserver-d55dfcdfc-t95vx\" (UID: \"3d5f918e-f713-40d2-9a2b-447c1aff6963\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.730870 4652 patch_prober.go:28] interesting pod/console-operator-58897d9998-mvtf6 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.730917 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-mvtf6" podUID="3440a79c-9036-4595-9967-ee9e5ae118c9" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.744930 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" event={"ID":"7220a624-ba75-4a01-9bcc-71450af1e114","Type":"ContainerStarted","Data":"35294cc7bc7a6b74f2b5c8f0b0cf88f877d84f59897510c4fff6dd6146ea2d9d"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.744978 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" event={"ID":"7220a624-ba75-4a01-9bcc-71450af1e114","Type":"ContainerStarted","Data":"ba3bdad36e849c7cf04863a13bdfee0dcb1e37c4e125fe12af670d819eac5990"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.747481 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8vmk\" (UniqueName: \"kubernetes.io/projected/2af0519d-fd20-46ec-a6d6-4c2532918731-kube-api-access-l8vmk\") pod \"csi-hostpathplugin-26kkh\" (UID: \"2af0519d-fd20-46ec-a6d6-4c2532918731\") " pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.759233 4652 generic.go:334] "Generic (PLEG): container finished" podID="7dac1b70-3636-47c4-be0b-a798cb11a6e7" containerID="ff4d1b5d9a83f1db362bbc7bae3bfb8e96e1c9ec09e18dcb75f1375753c9e846" exitCode=0 Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.759384 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" event={"ID":"7dac1b70-3636-47c4-be0b-a798cb11a6e7","Type":"ContainerDied","Data":"ff4d1b5d9a83f1db362bbc7bae3bfb8e96e1c9ec09e18dcb75f1375753c9e846"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.759486 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" 
event={"ID":"7dac1b70-3636-47c4-be0b-a798cb11a6e7","Type":"ContainerStarted","Data":"ec0ca9f36eb397494c4796a80bb17465973ac0a78d995dd459e24afe9e773aea"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.765095 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" event={"ID":"c435a822-7efb-4d40-b374-9523f145f48a","Type":"ContainerStarted","Data":"3729ced9f016e8409e31266654ca3b37130312fa10ba26ce94c15890da895ffa"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.765141 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" event={"ID":"c435a822-7efb-4d40-b374-9523f145f48a","Type":"ContainerStarted","Data":"a0aa20a54d9eebaa5ea2da00cbe0238c6a1cced2a0d24803e7678133fc0e992d"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.769824 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.769837 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c7bl\" (UniqueName: \"kubernetes.io/projected/5d8a4eb0-eb4c-4b60-b001-9aa831333db8-kube-api-access-7c7bl\") pod \"ingress-canary-jwzzn\" (UID: \"5d8a4eb0-eb4c-4b60-b001-9aa831333db8\") " pod="openshift-ingress-canary/ingress-canary-jwzzn" Dec 05 05:29:04 crc kubenswrapper[4652]: W1205 05:29:04.785071 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc547d04_f064_455f_b8d3_1d5627a82ca4.slice/crio-c376a5deec1fda0e756a2191f39b102d17df273ed747294af61531447ddcdede WatchSource:0}: Error finding container c376a5deec1fda0e756a2191f39b102d17df273ed747294af61531447ddcdede: Status 404 returned error can't find the container with id c376a5deec1fda0e756a2191f39b102d17df273ed747294af61531447ddcdede Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.785128 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5mktp"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.785375 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" event={"ID":"41f2faf0-6ed1-4043-8687-13dd1ef0788b","Type":"ContainerStarted","Data":"7d172490a018072af2a94b2ce60ee7fa92c906e44e0a824ef2d19d9e36bfcd21"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.785396 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" event={"ID":"41f2faf0-6ed1-4043-8687-13dd1ef0788b","Type":"ContainerStarted","Data":"e73e5cd747e8b4fe1c4dfa891cd9d887d49625e7008da613a55e2e4f4fa50bde"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.785407 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" event={"ID":"41f2faf0-6ed1-4043-8687-13dd1ef0788b","Type":"ContainerStarted","Data":"fafe20dbad5eadbc4866cb1ea3da81990da4a6f4df7024d7ade516081f4332e8"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.786237 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-9nh89"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.788662 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" event={"ID":"d2679bc9-2b4f-4c24-9625-447a63d8ac59","Type":"ContainerStarted","Data":"6ca7c757acf57aee0281cddf932e01aedfd0a41bef2577fb0a65becdcc557273"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.797396 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d559938-4b8d-4e83-9ec0-66a384fc5842-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vkdqw\" (UID: \"8d559938-4b8d-4e83-9ec0-66a384fc5842\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.812691 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.814427 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.314403537 +0000 UTC m=+147.551133804 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.821591 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7lwtl" event={"ID":"e26e158c-7bd6-45bd-be06-8cbe6c1d4912","Type":"ContainerStarted","Data":"4b9d6a8c4f8452fbc1c6adb5fcf9945e4451e7a8c4d66b3b80a6491d6531e8c3"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.830984 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" event={"ID":"15cb8768-99e0-4907-af4d-0167fff40d3f","Type":"ContainerStarted","Data":"7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.831030 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" event={"ID":"15cb8768-99e0-4907-af4d-0167fff40d3f","Type":"ContainerStarted","Data":"7825faf9291764667f13c1bb76e72a4d8c6c896d27c6deb7cc96342ce0b028ef"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.832210 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.832235 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j"] Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.838754 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" 
event={"ID":"3541b892-0b8c-4319-b4fa-b4d34cca9e18","Type":"ContainerStarted","Data":"50f90253a74d207dc695b15bd6d832cf26b7bb1e825b1c91d2453e3549ea495c"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.838782 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" event={"ID":"3541b892-0b8c-4319-b4fa-b4d34cca9e18","Type":"ContainerStarted","Data":"f564399d3a685f4d1a394ae816716b47894ae3f4ad61b6471ba7082ac6a228c4"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.838794 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" event={"ID":"3541b892-0b8c-4319-b4fa-b4d34cca9e18","Type":"ContainerStarted","Data":"04cbcc0807f01956d029b14c88385848a329c001566eb3d50a59b614339a88d1"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.847582 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.849849 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.870473 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.881918 4652 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-pslqt container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" start-of-body= Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.881954 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" podUID="15cb8768-99e0-4907-af4d-0167fff40d3f" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.884546 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.889328 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6a6fd8b618895c07e0c93ea7ca39e5d715683d18289ff2e27eacaa4b7597bfcb"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.897648 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"2c193d9c5420961c27645943d08721d52b8676a5c24eb586bb2b1c5642c18ff3"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.897853 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.899797 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-4fnfv" event={"ID":"939979d1-277a-4157-bc3a-8f765bd38e45","Type":"ContainerStarted","Data":"d6fd5ad831dcf64be42edabad48d3bce740a0b772501fb8c6f280ce4afa907a8"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.899855 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-4fnfv" event={"ID":"939979d1-277a-4157-bc3a-8f765bd38e45","Type":"ContainerStarted","Data":"44a72c370ec9aee2b32fa8b14481e8ed0ca2b1f98234ae8b98cf6d75d4881cab"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.915853 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.916736 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" event={"ID":"a2802700-9f5c-4987-905b-625784a96a37","Type":"ContainerStarted","Data":"883ba496b64043f5ddffe06b5c779118b2f9811a41ae80156a50fdae1bc1e1a7"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.916767 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" event={"ID":"a2802700-9f5c-4987-905b-625784a96a37","Type":"ContainerStarted","Data":"83b5138f0227941a4506e6ed93825877c12b3b5df1fc26f74c0e9596e081698d"} Dec 05 05:29:04 crc kubenswrapper[4652]: E1205 05:29:04.920231 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.420215047 +0000 UTC m=+147.656945305 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.920898 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.924677 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" event={"ID":"28425929-c82a-4d16-a278-1cf2786276e5","Type":"ContainerStarted","Data":"001d10fbea144382fdb0c823b0e18b77c3881883335a8c969cc8693f688082ed"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.932572 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-pjnkl" event={"ID":"f4bd4318-0406-40e2-8b50-b79c312bb10a","Type":"ContainerStarted","Data":"f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.932621 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-pjnkl" event={"ID":"f4bd4318-0406-40e2-8b50-b79c312bb10a","Type":"ContainerStarted","Data":"598e5af9201d9ee89551dc66cae596f3368ffab8ca016b9d0d36810283a61f8a"} Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.933803 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g"] Dec 05 05:29:04 crc kubenswrapper[4652]: W1205 05:29:04.937373 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f0188c4_f23b_4b4d_89b2_b13a210ff2be.slice/crio-c65eb5b81a5a608a813ca34dc1f7780254b9d761735616fa539dea2431a607ef WatchSource:0}: Error finding container c65eb5b81a5a608a813ca34dc1f7780254b9d761735616fa539dea2431a607ef: Status 404 returned error can't find the container with id c65eb5b81a5a608a813ca34dc1f7780254b9d761735616fa539dea2431a607ef Dec 05 05:29:04 crc kubenswrapper[4652]: W1205 05:29:04.941164 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8b5481d_4d3c_47e4_8e9d_c3e70ed49726.slice/crio-de3e5297db5ed79a1fcbc619d7fcfae6f0ccd528007f7ed84a5b96c3010d92d9 WatchSource:0}: Error finding container de3e5297db5ed79a1fcbc619d7fcfae6f0ccd528007f7ed84a5b96c3010d92d9: Status 404 returned error can't find the container with id de3e5297db5ed79a1fcbc619d7fcfae6f0ccd528007f7ed84a5b96c3010d92d9 Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.950214 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.987696 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-jwzzn" Dec 05 05:29:04 crc kubenswrapper[4652]: I1205 05:29:04.999277 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.016672 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-26kkh" Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.024234 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.025650 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.525630042 +0000 UTC m=+147.762360309 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.026353 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.125997 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.126363 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.62634853 +0000 UTC m=+147.863078798 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.227088 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.227509 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.727491908 +0000 UTC m=+147.964222175 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.268911 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.310365 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.311632 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.329236 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.329672 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.829654042 +0000 UTC m=+148.066384310 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: W1205 05:29:05.394442 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2ed5e38_62e9_493a_8784_278a6d52fb2a.slice/crio-658f03fc46352aee72795204909663cd4ef963d8f9337d66d810494005f484d6 WatchSource:0}: Error finding container 658f03fc46352aee72795204909663cd4ef963d8f9337d66d810494005f484d6: Status 404 returned error can't find the container with id 658f03fc46352aee72795204909663cd4ef963d8f9337d66d810494005f484d6 Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.430075 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.430591 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:05.930576375 +0000 UTC m=+148.167306631 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.512107 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5ksc5"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.533414 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.538582 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.038566172 +0000 UTC m=+148.275296439 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.544652 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.555167 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.608514 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.634823 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.635143 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.13512756 +0000 UTC m=+148.371857827 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.635547 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.635951 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.135931903 +0000 UTC m=+148.372662170 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.637571 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.642529 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6hfpm"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.642597 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.647542 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz"] Dec 05 05:29:05 crc kubenswrapper[4652]: W1205 05:29:05.647721 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05106619_f9ca_428e_9e93_30c9bc9c2218.slice/crio-d5441909749c594fd909f1f54b35a03f2b57793cb95ca113ad66591ca73e8946 WatchSource:0}: Error finding container d5441909749c594fd909f1f54b35a03f2b57793cb95ca113ad66591ca73e8946: Status 404 returned error can't find the container with id d5441909749c594fd909f1f54b35a03f2b57793cb95ca113ad66591ca73e8946 Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.658423 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7qc7b"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.737026 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.737586 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.23756905 +0000 UTC m=+148.474299317 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.754906 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.774274 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.838657 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.839228 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.339215585 +0000 UTC m=+148.575945852 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.880265 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.886289 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-jwzzn"] Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.939717 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.940168 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.440143196 +0000 UTC m=+148.676873463 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.940322    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:05 crc kubenswrapper[4652]: E1205 05:29:05.940783    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.440769205 +0000 UTC m=+148.677499472 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.954543    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" event={"ID":"4a5a271e-1e58-4533-8c2a-d06653d75f2c","Type":"ContainerStarted","Data":"7e747c9bb5824a6f96163a3ae6ce4a0cfb1d5f62646f89fc293601719533e29a"}
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.956257    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-cgsmk" event={"ID":"f03783a0-28f4-48b5-9b19-5c9e3923e3fe","Type":"ContainerStarted","Data":"c9d02848a561cc37c965e33573ae8dcd0d6713056bb349c5c96b2b2768d4c42a"}
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.956290    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-cgsmk" event={"ID":"f03783a0-28f4-48b5-9b19-5c9e3923e3fe","Type":"ContainerStarted","Data":"4e62eff4cead8152cfdcf2a5f25062e45033cbc70ad74be7c8a3917a64cd4c05"}
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.963217    4652 generic.go:334] "Generic (PLEG): container finished" podID="d2679bc9-2b4f-4c24-9625-447a63d8ac59" containerID="6cd7421958c69ca19ca8092d4f6c0a90ac111fb6d1c479790c4fe409cdd49009" exitCode=0
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.963366    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" event={"ID":"d2679bc9-2b4f-4c24-9625-447a63d8ac59","Type":"ContainerDied","Data":"6cd7421958c69ca19ca8092d4f6c0a90ac111fb6d1c479790c4fe409cdd49009"}
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.968366    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" event={"ID":"fb11d23b-9cc6-4aeb-af41-fd70e19631b7","Type":"ContainerStarted","Data":"10fdf56704f9d8e88b8481e0971e30ea561de2078d7228959efab79a122d019f"}
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.980019    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" event={"ID":"d8b9d5f4-186a-4646-ab32-0f3c63e23676","Type":"ContainerStarted","Data":"d0acb04a8b4e2ee12d423b71eff6675c8c5a8f6fefabca8d8d0d3320598c7224"}
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.991007    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" event={"ID":"d3688b95-25a0-4f1c-87c7-dfb4d913a451","Type":"ContainerStarted","Data":"2b27a5f933b317a24571b9f3152ab9bb539d4e2f3206798d51cfd2ce4cc1dbba"}
Dec 05 05:29:05 crc kubenswrapper[4652]: I1205 05:29:05.995926    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" event={"ID":"05106619-f9ca-428e-9e93-30c9bc9c2218","Type":"ContainerStarted","Data":"d5441909749c594fd909f1f54b35a03f2b57793cb95ca113ad66591ca73e8946"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.001463    4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-26kkh"]
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.006299    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" event={"ID":"cc547d04-f064-455f-b8d3-1d5627a82ca4","Type":"ContainerStarted","Data":"f608825739f49917ea46d869d58ff00a51ade0c41d5ca242a3cac2f42eb05ac2"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.006329    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" event={"ID":"cc547d04-f064-455f-b8d3-1d5627a82ca4","Type":"ContainerStarted","Data":"c376a5deec1fda0e756a2191f39b102d17df273ed747294af61531447ddcdede"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.009447    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" event={"ID":"31d139be-8c3b-4ff9-9e9d-872906e7a547","Type":"ContainerStarted","Data":"632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.009477    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" event={"ID":"31d139be-8c3b-4ff9-9e9d-872906e7a547","Type":"ContainerStarted","Data":"d0c0e26e4f2711ac619b48134b7688792183ea6df1bc871bba3cc9562b8da274"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.010115    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.011404    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" event={"ID":"8ae7c30e-7f0b-44e1-89df-b5f8f20b9c60","Type":"ContainerStarted","Data":"dd6c9e612f3ea5fa63c22063c3432a4d21f6811217af822537746cdce16c91a1"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.012847    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" event={"ID":"a2ed5e38-62e9-493a-8784-278a6d52fb2a","Type":"ContainerStarted","Data":"658f03fc46352aee72795204909663cd4ef963d8f9337d66d810494005f484d6"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.013273    4652 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-5mktp container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body=
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.013303    4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" podUID="31d139be-8c3b-4ff9-9e9d-872906e7a547" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.016859    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-jwzzn" event={"ID":"5d8a4eb0-eb4c-4b60-b001-9aa831333db8","Type":"ContainerStarted","Data":"03dcfe5f718a3ffa3b781a825fb3f1d980511f28739f560657dd2f1c5ff52d7c"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.020742    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" event={"ID":"6b251475-c037-4453-8f24-406781fabc44","Type":"ContainerStarted","Data":"bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.020768    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" event={"ID":"6b251475-c037-4453-8f24-406781fabc44","Type":"ContainerStarted","Data":"f8b1e25fe22ec7df8dc736cb6180ea92c77ace675b721cbb4355198375681410"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.021248    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.022479    4652 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9lzgg container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body=
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.022504    4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" podUID="6b251475-c037-4453-8f24-406781fabc44" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.036301    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" event={"ID":"fcbda467-98bc-40e1-a749-2f7ba3c9d331","Type":"ContainerStarted","Data":"8446d1400cba4cf80e14763ed02abe1afe984ac3d6830b58fdf198131fe61214"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.039652    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" event={"ID":"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5","Type":"ContainerStarted","Data":"4b669367deba2d68f71224b6f9a931f837492d15cae38acf2f57a11e0fe1cdda"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.040711    4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.041466    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.541441345 +0000 UTC m=+148.778171612 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.045730    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" event={"ID":"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8","Type":"ContainerStarted","Data":"3656a66047ec2cbc119681c4b3a5a7167a1a7b5d295c4c282e5cf0dd1a4f6377"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.055894    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" event={"ID":"7dac1b70-3636-47c4-be0b-a798cb11a6e7","Type":"ContainerStarted","Data":"18ed089374dfa434ad31ae202f8fae61972f4209f3c41012dad71137e2a46a66"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.057941    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" event={"ID":"f9db05ec-e04b-409c-9523-4b102664127d","Type":"ContainerStarted","Data":"ae69ad96dc9e1721fcd3cbd2bc3adce88ea4626ac900fc129d1ad6851e6d2057"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.059729    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" event={"ID":"28425929-c82a-4d16-a278-1cf2786276e5","Type":"ContainerStarted","Data":"b2f8929db96f8133494cfcbc05bcd275f017928e263bb1d46132cf664fda4982"}
Dec 05 05:29:06 crc kubenswrapper[4652]: W1205 05:29:06.061446    4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2af0519d_fd20_46ec_a6d6_4c2532918731.slice/crio-7c1211a557342db18173674a4d54e6ccb343ff4d9380c21ef32d0c883fd0f88f WatchSource:0}: Error finding container 7c1211a557342db18173674a4d54e6ccb343ff4d9380c21ef32d0c883fd0f88f: Status 404 returned error can't find the container with id 7c1211a557342db18173674a4d54e6ccb343ff4d9380c21ef32d0c883fd0f88f
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.065281    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7lwtl" event={"ID":"e26e158c-7bd6-45bd-be06-8cbe6c1d4912","Type":"ContainerStarted","Data":"89d0d108fdcadb1edb05b131439ad33265696e5b794f2bd4e9a9f21d8d560f14"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.066168    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7lwtl"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.066949    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" event={"ID":"cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6","Type":"ContainerStarted","Data":"ccfdff1d29e3ac1de13e7c289a5a9ed2e8f8e731dbee76db26b8196258f79238"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.069943    4652 generic.go:334] "Generic (PLEG): container finished" podID="f8b5481d-4d3c-47e4-8e9d-c3e70ed49726" containerID="96e24e6cdb01638180642b327f973eaab2e4783c304b7aa989da4109bae20c0d" exitCode=0
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.069991    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" event={"ID":"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726","Type":"ContainerDied","Data":"96e24e6cdb01638180642b327f973eaab2e4783c304b7aa989da4109bae20c0d"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.070010    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" event={"ID":"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726","Type":"ContainerStarted","Data":"de3e5297db5ed79a1fcbc619d7fcfae6f0ccd528007f7ed84a5b96c3010d92d9"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.071145    4652 patch_prober.go:28] interesting pod/downloads-7954f5f757-7lwtl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.071165    4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7lwtl" podUID="e26e158c-7bd6-45bd-be06-8cbe6c1d4912" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.074307    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" event={"ID":"5f0188c4-f23b-4b4d-89b2-b13a210ff2be","Type":"ContainerStarted","Data":"6927996e0e2f2847f4997b273eb31f4600b5b42c72b641413c8c16a29160a34b"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.074335    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" event={"ID":"5f0188c4-f23b-4b4d-89b2-b13a210ff2be","Type":"ContainerStarted","Data":"c65eb5b81a5a608a813ca34dc1f7780254b9d761735616fa539dea2431a607ef"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.075192    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" event={"ID":"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8","Type":"ContainerStarted","Data":"93949e4f124e26b23fba9cb70fc603bd86249a414b774f63017f04e304ee583b"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.077155    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" event={"ID":"a2802700-9f5c-4987-905b-625784a96a37","Type":"ContainerStarted","Data":"15dba3563c3cab9ad71286ac741c4f64b5ed23b69bdc8df3cfc3f66a296eb4f0"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.082264    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" event={"ID":"60243fa4-404c-47cc-b17e-0a338bb89b54","Type":"ContainerStarted","Data":"d8300ef461ec1dafad0c00d0b2d1212e4cee4f07479a823f0b269014c2a45265"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.082317    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" event={"ID":"60243fa4-404c-47cc-b17e-0a338bb89b54","Type":"ContainerStarted","Data":"68a733ea273e0c6f59f66fe8a36f1853e167dbf417570a0a88c132a79f414cde"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.085178    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" event={"ID":"d37a7d0d-0a12-40d2-b970-90394f2c11a8","Type":"ContainerStarted","Data":"621cea25a9270eb62af457e56a280ca8b616ae4a9fb8243eed4f7ae4b978d605"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.085213    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" event={"ID":"d37a7d0d-0a12-40d2-b970-90394f2c11a8","Type":"ContainerStarted","Data":"6d30004359fcccca9d41c3314457843f6741662d708d02912fa0d91aaf75d0fb"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.105130    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" event={"ID":"3d5f918e-f713-40d2-9a2b-447c1aff6963","Type":"ContainerStarted","Data":"301286fea3d4337a12bafe1e432173a185f6ab42b5c2a3cea346a196b42a5b6f"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.106686    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" event={"ID":"74ef5354-bf2b-41bc-ab15-25b0eaf8618a","Type":"ContainerStarted","Data":"dd63ffc69901802fed4c5cd386156c38c5bb37d63765e39c1fe0c422830a5cef"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.106709    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" event={"ID":"74ef5354-bf2b-41bc-ab15-25b0eaf8618a","Type":"ContainerStarted","Data":"25373285c1df9889f76f415a2dba0dd9628c35fb4654aa20d8e1917faad6fb68"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.115594    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7qc7b" event={"ID":"7207e17a-8352-4874-8fd7-dacd6763098f","Type":"ContainerStarted","Data":"a43b8f5b969c97112a7021c64140558196bb565bf02c0dde2465d58af336c263"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.143531    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.145160    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.645149629 +0000 UTC m=+148.881879896 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.160146    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-mvtf6"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.160176    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.160190    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" event={"ID":"9c118e33-8f43-4a93-9fba-c4562daa381b","Type":"ContainerStarted","Data":"fb686635f7cc30085bf6cea23a2ff2952cb2b43881aca3905b73be536d3cb588"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.160211    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" event={"ID":"9c118e33-8f43-4a93-9fba-c4562daa381b","Type":"ContainerStarted","Data":"681c1ff55ff05d019d64f221b4632ab23ec6f7d9896240b878e4293d886494ad"}
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.244032    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-gdjlr" podStartSLOduration=125.244012486 podStartE2EDuration="2m5.244012486s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.241521139 +0000 UTC m=+148.478251406" watchObservedRunningTime="2025-12-05 05:29:06.244012486 +0000 UTC m=+148.480742753"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.245127    4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.258702    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.758608216 +0000 UTC m=+148.995338484 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.331190    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-5d42v" podStartSLOduration=124.331169771 podStartE2EDuration="2m4.331169771s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.320119051 +0000 UTC m=+148.556849318" watchObservedRunningTime="2025-12-05 05:29:06.331169771 +0000 UTC m=+148.567900039"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.359442    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.359838    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.859822758 +0000 UTC m=+149.096553025 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.433898    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" podStartSLOduration=125.433878924 podStartE2EDuration="2m5.433878924s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.432867351 +0000 UTC m=+148.669597618" watchObservedRunningTime="2025-12-05 05:29:06.433878924 +0000 UTC m=+148.670609192"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.467055    4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.467512    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:06.967497413 +0000 UTC m=+149.204227680 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.506914    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-mvtf6" podStartSLOduration=124.506899404 podStartE2EDuration="2m4.506899404s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.499692293 +0000 UTC m=+148.736422561" watchObservedRunningTime="2025-12-05 05:29:06.506899404 +0000 UTC m=+148.743629670"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.568499    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.568796    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.068784331 +0000 UTC m=+149.305514599 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.569378    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" podStartSLOduration=124.569366957 podStartE2EDuration="2m4.569366957s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.569169095 +0000 UTC m=+148.805899362" watchObservedRunningTime="2025-12-05 05:29:06.569366957 +0000 UTC m=+148.806097224"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.578792    4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-cgsmk"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.588705    4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 05:29:06 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld
Dec 05 05:29:06 crc kubenswrapper[4652]: [+]process-running ok
Dec 05 05:29:06 crc kubenswrapper[4652]: healthz check failed
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.588741    4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.589783    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bm6ft" podStartSLOduration=124.589771465 podStartE2EDuration="2m4.589771465s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.589301701 +0000 UTC m=+148.826031969" watchObservedRunningTime="2025-12-05 05:29:06.589771465 +0000 UTC m=+148.826501733"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.635504    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jgztd" podStartSLOduration=125.635480882 podStartE2EDuration="2m5.635480882s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.629853325 +0000 UTC m=+148.866583591" watchObservedRunningTime="2025-12-05 05:29:06.635480882 +0000 UTC m=+148.872211139"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.669523    4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.670240    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.170224608 +0000 UTC m=+149.406954875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.707420    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wbc4j" podStartSLOduration=124.707395379 podStartE2EDuration="2m4.707395379s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.671795152 +0000 UTC m=+148.908525419" watchObservedRunningTime="2025-12-05 05:29:06.707395379 +0000 UTC m=+148.944125647"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.744118    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-cgsmk" podStartSLOduration=124.74409739 podStartE2EDuration="2m4.74409739s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.712926947 +0000 UTC m=+148.949657224" watchObservedRunningTime="2025-12-05 05:29:06.74409739 +0000 UTC m=+148.980827657"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.744810    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-z6p5f" podStartSLOduration=124.744804089 podStartE2EDuration="2m4.744804089s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.744232834 +0000 UTC m=+148.980963101" watchObservedRunningTime="2025-12-05 05:29:06.744804089 +0000 UTC m=+148.981534357"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.776274    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.776647    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.276636458 +0000 UTC m=+149.513366715 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.840455    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lp26s" podStartSLOduration=125.840435397 podStartE2EDuration="2m5.840435397s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.799583519 +0000 UTC m=+149.036313786" watchObservedRunningTime="2025-12-05 05:29:06.840435397 +0000 UTC m=+149.077165664"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.842580    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-4fnfv" podStartSLOduration=5.842528594 podStartE2EDuration="5.842528594s" podCreationTimestamp="2025-12-05 05:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.840729891 +0000 UTC m=+149.077460158" watchObservedRunningTime="2025-12-05 05:29:06.842528594 +0000 UTC m=+149.079258862"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.878535    4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.879223    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.379204715 +0000 UTC m=+149.615934983 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.932583    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-6cfcs" podStartSLOduration=124.932549988 podStartE2EDuration="2m4.932549988s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.930646446 +0000 UTC m=+149.167376704" watchObservedRunningTime="2025-12-05 05:29:06.932549988 +0000 UTC m=+149.169280255"
Dec 05 05:29:06 crc kubenswrapper[4652]: I1205 05:29:06.982666    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:06 crc kubenswrapper[4652]: E1205 05:29:06.983040    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.483025331 +0000 UTC m=+149.719755598 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.047886    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4z8lw" podStartSLOduration=126.047869466 podStartE2EDuration="2m6.047869466s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.046545766 +0000 UTC m=+149.283276033" watchObservedRunningTime="2025-12-05 05:29:07.047869466 +0000 UTC m=+149.284599733"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.049712    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-4kx7t" podStartSLOduration=125.049701673 podStartE2EDuration="2m5.049701673s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:06.96774318 +0000 UTC m=+149.204473446" watchObservedRunningTime="2025-12-05 05:29:07.049701673 +0000 UTC m=+149.286431941"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.087187    4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.087352    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.587328153 +0000 UTC m=+149.824058420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.087617    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.088078    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.58806561 +0000 UTC m=+149.824795877 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.099434    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7lwtl" podStartSLOduration=125.099415253 podStartE2EDuration="2m5.099415253s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.097427553 +0000 UTC m=+149.334157820" watchObservedRunningTime="2025-12-05 05:29:07.099415253 +0000 UTC m=+149.336145520"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.157479    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" event={"ID":"d37a7d0d-0a12-40d2-b970-90394f2c11a8","Type":"ContainerStarted","Data":"0780e15ea5fe829777080fbaa8a702270028084bf991eae081ba7cde0b9f2ebb"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.161585    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" podStartSLOduration=125.161566762 podStartE2EDuration="2m5.161566762s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.126666031 +0000 UTC m=+149.363396298" watchObservedRunningTime="2025-12-05 05:29:07.161566762 +0000 UTC m=+149.398297030"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.170950    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-26kkh" event={"ID":"2af0519d-fd20-46ec-a6d6-4c2532918731","Type":"ContainerStarted","Data":"7c1211a557342db18173674a4d54e6ccb343ff4d9380c21ef32d0c883fd0f88f"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.174164    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" event={"ID":"8d559938-4b8d-4e83-9ec0-66a384fc5842","Type":"ContainerStarted","Data":"3fca842706a5bc9d57e3e3b13c346cff62c0a2ab9d459568d953f92b2e3c29e3"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.174225    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" event={"ID":"8d559938-4b8d-4e83-9ec0-66a384fc5842","Type":"ContainerStarted","Data":"8d9e68f3529decb8f4eb4abf2b9fa957add4790f1537a1accd2a4781fa801018"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.183624    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" event={"ID":"cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6","Type":"ContainerStarted","Data":"6b2a32e107faa54b2a99787a91d58023f0d8fdb412a2324e31fccd55fead8984"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.188772    4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.189418    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.689385651 +0000 UTC m=+149.926115917 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.200008    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-pjnkl" podStartSLOduration=125.199996914 podStartE2EDuration="2m5.199996914s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.158782504 +0000 UTC m=+149.395512762" watchObservedRunningTime="2025-12-05 05:29:07.199996914 +0000 UTC m=+149.436727181"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.200384    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" event={"ID":"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8","Type":"ContainerStarted","Data":"e9bd61458962e607fbd9db81b17667dc9c77d630f4a78da0098fc14f716b474a"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.200409    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" event={"ID":"5c553f4a-55fd-47f8-9c30-1cd4da3c70d8","Type":"ContainerStarted","Data":"8fedd610ff91024fcb54f8325f6aace2124c255be7405a384bb1c0662e58b430"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.201870    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.209368    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" event={"ID":"4a5a271e-1e58-4533-8c2a-d06653d75f2c","Type":"ContainerStarted","Data":"9b53c1a790226b1fb933a3ba2727fb4d9a860d49db63ddaa923d5cb1a97fa61a"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.211542    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" event={"ID":"cc547d04-f064-455f-b8d3-1d5627a82ca4","Type":"ContainerStarted","Data":"8fa3850dba56505a8a43f61045394106b158951bc25079dab164313dde1f1f2d"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.229400    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" event={"ID":"fcbda467-98bc-40e1-a749-2f7ba3c9d331","Type":"ContainerStarted","Data":"9700fd70a91b6fdeca95637ee483e8fce423e212556eb5f68e5f6f452c3decb8"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.229442    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.241819    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" podStartSLOduration=125.241800702 podStartE2EDuration="2m5.241800702s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.240176106 +0000 UTC m=+149.476906373" watchObservedRunningTime="2025-12-05 05:29:07.241800702 +0000 UTC m=+149.478530959"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.243628    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" podStartSLOduration=126.2436171 podStartE2EDuration="2m6.2436171s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.200802009 +0000 UTC m=+149.437532276" watchObservedRunningTime="2025-12-05 05:29:07.2436171 +0000 UTC m=+149.480347367"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.248045    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" event={"ID":"f8b5481d-4d3c-47e4-8e9d-c3e70ed49726","Type":"ContainerStarted","Data":"559b1a28115bc9dc8f932fb5f985c21eecb89646d74ac1ba1a10690c69302ba2"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.248532    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.249829    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" event={"ID":"d3688b95-25a0-4f1c-87c7-dfb4d913a451","Type":"ContainerStarted","Data":"19bb9f157fb151f08bdb2f80a82efbb6782f81b55a8df4248ae75d26f21e4012"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.250342    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.251935    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" event={"ID":"05106619-f9ca-428e-9e93-30c9bc9c2218","Type":"ContainerStarted","Data":"19d2caf3db03b1b35ab5c6dca0ee0f20beefc32b443ae9ad086ac159d25f4238"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.251961    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" event={"ID":"05106619-f9ca-428e-9e93-30c9bc9c2218","Type":"ContainerStarted","Data":"dc9090cc2a479ffe1ccfe6651a9c74c5074d67f780b6991625b73b32d2775316"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.252332    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.257652    4652 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-286n5 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.257692    4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" podUID="d3688b95-25a0-4f1c-87c7-dfb4d913a451" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.268859    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fc94g" podStartSLOduration=125.268842979 podStartE2EDuration="2m5.268842979s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.267788926 +0000 UTC m=+149.504519193" watchObservedRunningTime="2025-12-05 05:29:07.268842979 +0000 UTC m=+149.505573246"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.272389    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" event={"ID":"f9db05ec-e04b-409c-9523-4b102664127d","Type":"ContainerStarted","Data":"d6089209438460b74bc1145229aac159f1a222bf5aeec2a493b47e367c08cf78"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.293010    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.294475    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.794462749 +0000 UTC m=+150.031193016 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.301289    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" event={"ID":"d2679bc9-2b4f-4c24-9625-447a63d8ac59","Type":"ContainerStarted","Data":"c90d4b33365ddf15e41a21eda155c67e99df6ccec14041e15182ef3d90794cb2"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.301291    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dtnxw" podStartSLOduration=125.301279824 podStartE2EDuration="2m5.301279824s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.301090478 +0000 UTC m=+149.537820745" watchObservedRunningTime="2025-12-05 05:29:07.301279824 +0000 UTC m=+149.538010091"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.313341    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" event={"ID":"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5","Type":"ContainerStarted","Data":"51cb079769f607bf48ea47f38292b870fda691296a354dc008ad42528b043e23"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.343721    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" event={"ID":"a2ed5e38-62e9-493a-8784-278a6d52fb2a","Type":"ContainerStarted","Data":"0ed1ce704a382adb7cbdf8abfce5926266af0bc8a2a00d20affcae4e34c4a79d"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.364128    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" event={"ID":"6fd8097a-ec17-417c-b4b8-eeb5d07cc9e8","Type":"ContainerStarted","Data":"d996a51b51390636c5bc58b9201d446de71acbbb3a8a6087a62b89263da7578c"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.371072    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-jwzzn" event={"ID":"5d8a4eb0-eb4c-4b60-b001-9aa831333db8","Type":"ContainerStarted","Data":"12dc8f5014c66410da7e39b6e01c9f6f62bd916532b6ba096ed782f72c87626c"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.394089    4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.395289    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:07.895271087 +0000 UTC m=+150.132001354 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.395985    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" event={"ID":"7dac1b70-3636-47c4-be0b-a798cb11a6e7","Type":"ContainerStarted","Data":"4142fd4d22bef6576618fa672d125bfeab37eeead35b539401d00ffc04c496f3"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.417139    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" event={"ID":"fb11d23b-9cc6-4aeb-af41-fd70e19631b7","Type":"ContainerStarted","Data":"089ddf3075c2df9e9a01c9c49fb191ee077fc0a33f8d6a1e374280525cc20a09"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.427054    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" event={"ID":"3d5f918e-f713-40d2-9a2b-447c1aff6963","Type":"ContainerStarted","Data":"bc7fe7bcccadf12d620ab522aae1bb9ff8039aab91b76a82f775b3e0f171189d"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.427725    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.436164    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vkdqw" podStartSLOduration=125.436149424 podStartE2EDuration="2m5.436149424s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.377760974 +0000 UTC m=+149.614491240" watchObservedRunningTime="2025-12-05 05:29:07.436149424 +0000 UTC m=+149.672879691"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.443659    4652 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-t95vx container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:5443/healthz\": dial tcp 10.217.0.41:5443: connect: connection refused" start-of-body=
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.443711    4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" podUID="3d5f918e-f713-40d2-9a2b-447c1aff6963" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.41:5443/healthz\": dial tcp 10.217.0.41:5443: connect: connection refused"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.444082    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" event={"ID":"d8b9d5f4-186a-4646-ab32-0f3c63e23676","Type":"ContainerStarted","Data":"7d0f6a050c33c2ffdbc2c6d056a0e7f630d39652f1da07218a0024fbd8b47d58"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.444935    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.456710    4652 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5ksc5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body=
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.456772    4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.463127    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7qc7b" event={"ID":"7207e17a-8352-4874-8fd7-dacd6763098f","Type":"ContainerStarted","Data":"3b9c82a826c88385e92e12bd6978c5bbfcb08f1ecadf6869d06b626f27b9c069"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.477346    4652 generic.go:334] "Generic (PLEG): container finished" podID="9c118e33-8f43-4a93-9fba-c4562daa381b" containerID="fb686635f7cc30085bf6cea23a2ff2952cb2b43881aca3905b73be536d3cb588" exitCode=0
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.477651    4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" event={"ID":"9c118e33-8f43-4a93-9fba-c4562daa381b","Type":"ContainerDied","Data":"fb686635f7cc30085bf6cea23a2ff2952cb2b43881aca3905b73be536d3cb588"}
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.478501    4652 patch_prober.go:28] interesting pod/downloads-7954f5f757-7lwtl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.478529    4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7lwtl" podUID="e26e158c-7bd6-45bd-be06-8cbe6c1d4912" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.496082    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.497086    4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.498690    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xxjmg" podStartSLOduration=125.49867692 podStartE2EDuration="2m5.49867692s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.49727322 +0000 UTC m=+149.734003486" watchObservedRunningTime="2025-12-05 05:29:07.49867692 +0000 UTC m=+149.735407187"
Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.500155    4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.000139522 +0000 UTC m=+150.236869789 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.500188    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" podStartSLOduration=125.500179738 podStartE2EDuration="2m5.500179738s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.435094128 +0000 UTC m=+149.671824395" watchObservedRunningTime="2025-12-05 05:29:07.500179738 +0000 UTC m=+149.736910005"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.509959    4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.541535    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" podStartSLOduration=125.541524974 podStartE2EDuration="2m5.541524974s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.539060897 +0000 UTC m=+149.775791164" watchObservedRunningTime="2025-12-05 05:29:07.541524974 +0000 UTC m=+149.778255241"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.591147    4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89" podStartSLOduration=125.591131111 podStartE2EDuration="2m5.591131111s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.589351574 +0000 UTC m=+149.826081840" watchObservedRunningTime="2025-12-05 05:29:07.591131111 +0000 UTC m=+149.827861368"
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.596475    4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 05:29:07 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld
Dec 05 05:29:07 crc kubenswrapper[4652]: [+]process-running ok
Dec 05 05:29:07 crc kubenswrapper[4652]: healthz check failed
Dec 05 05:29:07 crc kubenswrapper[4652]: I1205
05:29:07.596685 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.604574 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.605802 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.105786834 +0000 UTC m=+150.342517101 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.622427 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-fjbx8" podStartSLOduration=125.622403837 podStartE2EDuration="2m5.622403837s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.621105423 +0000 UTC m=+149.857835691" watchObservedRunningTime="2025-12-05 05:29:07.622403837 +0000 UTC m=+149.859134104" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.666544 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-jwzzn" podStartSLOduration=6.666527258 podStartE2EDuration="6.666527258s" podCreationTimestamp="2025-12-05 05:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.666462728 +0000 UTC m=+149.903192984" watchObservedRunningTime="2025-12-05 05:29:07.666527258 +0000 UTC m=+149.903257526" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.706983 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.707322 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.207310126 +0000 UTC m=+150.444040394 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.715663 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-rlt2r" podStartSLOduration=125.715644377 podStartE2EDuration="2m5.715644377s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.713026461 +0000 UTC m=+149.949756717" watchObservedRunningTime="2025-12-05 05:29:07.715644377 +0000 UTC m=+149.952374643" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.774885 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4kzzs" podStartSLOduration=125.77485314 podStartE2EDuration="2m5.77485314s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.773804397 +0000 UTC m=+150.010534653" watchObservedRunningTime="2025-12-05 05:29:07.77485314 +0000 UTC m=+150.011583396" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.807758 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.808457 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.308441562 +0000 UTC m=+150.545171828 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.842521 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" podStartSLOduration=125.84250102 podStartE2EDuration="2m5.84250102s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.796943049 +0000 UTC m=+150.033673306" watchObservedRunningTime="2025-12-05 05:29:07.84250102 +0000 UTC m=+150.079231277" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.843813 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tjl2q" podStartSLOduration=125.843804543 podStartE2EDuration="2m5.843804543s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.841262299 +0000 UTC m=+150.077992567" watchObservedRunningTime="2025-12-05 05:29:07.843804543 +0000 UTC m=+150.080534810" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.868671 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" podStartSLOduration=125.868653092 podStartE2EDuration="2m5.868653092s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.866753287 +0000 UTC m=+150.103483554" watchObservedRunningTime="2025-12-05 05:29:07.868653092 +0000 UTC m=+150.105383349" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.902787 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-6hfpm" podStartSLOduration=125.90276571 podStartE2EDuration="2m5.90276571s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.890453966 +0000 UTC m=+150.127184224" watchObservedRunningTime="2025-12-05 05:29:07.90276571 +0000 UTC m=+150.139495967" Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.910399 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:07 crc kubenswrapper[4652]: E1205 05:29:07.910747 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 05:29:08.410735163 +0000 UTC m=+150.647465430 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:07 crc kubenswrapper[4652]: I1205 05:29:07.967279 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" podStartSLOduration=125.967259918 podStartE2EDuration="2m5.967259918s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.964990047 +0000 UTC m=+150.201720314" watchObservedRunningTime="2025-12-05 05:29:07.967259918 +0000 UTC m=+150.203990184" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.001007 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" podStartSLOduration=126.000984916 podStartE2EDuration="2m6.000984916s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:07.999886711 +0000 UTC m=+150.236616978" watchObservedRunningTime="2025-12-05 05:29:08.000984916 +0000 UTC m=+150.237715183" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.014085 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.014292 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.514258158 +0000 UTC m=+150.750988426 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.014436 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.014741 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.514728393 +0000 UTC m=+150.751458661 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.056629 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" podStartSLOduration=127.056613214 podStartE2EDuration="2m7.056613214s" podCreationTimestamp="2025-12-05 05:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:08.055299933 +0000 UTC m=+150.292030200" watchObservedRunningTime="2025-12-05 05:29:08.056613214 +0000 UTC m=+150.293343481" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.115090 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.115272 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.615247687 +0000 UTC m=+150.851977954 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.115352 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.115691 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.61566418 +0000 UTC m=+150.852394447 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.131976 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-flpnc" podStartSLOduration=126.131962694 podStartE2EDuration="2m6.131962694s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:08.13007363 +0000 UTC m=+150.366803897" watchObservedRunningTime="2025-12-05 05:29:08.131962694 +0000 UTC m=+150.368692961" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.216001 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.216172 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.716150993 +0000 UTC m=+150.952881260 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.216254 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.216676 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.71665988 +0000 UTC m=+150.953390148 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.317674 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.317845 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.817819218 +0000 UTC m=+151.054549485 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.318658 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.318976 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.818967008 +0000 UTC m=+151.055697274 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.419447 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.419686 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.919658225 +0000 UTC m=+151.156388492 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.419827 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.420129 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:08.920115245 +0000 UTC m=+151.156845512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.502477 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7qc7b" event={"ID":"7207e17a-8352-4874-8fd7-dacd6763098f","Type":"ContainerStarted","Data":"c8245bc58916c61272b915160da3e69e4126fcd2122a6edf255d5d3078964888"} Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.503344 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.511932 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rhtqp" event={"ID":"1b7b7ed1-593b-4d0a-846e-82d3db6fd3b5","Type":"ContainerStarted","Data":"29792b284cae7a2adab3f5ef9e35b076ff84ae349af0e5e5e1de3c079887729b"} Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.517970 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dz64t" event={"ID":"f9db05ec-e04b-409c-9523-4b102664127d","Type":"ContainerStarted","Data":"02030a00fcd16a78ae37071e07a3065b3afae9ed9b8f4069da5cf06cfd791092"} Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.520794 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.521248 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.021180656 +0000 UTC m=+151.257910923 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.522351 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-26kkh" event={"ID":"2af0519d-fd20-46ec-a6d6-4c2532918731","Type":"ContainerStarted","Data":"04810bf5b5f1f7dd657ca026e1aa00bec04bdc0c78d5e5558371de329fdd749f"} Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.522383 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-26kkh" event={"ID":"2af0519d-fd20-46ec-a6d6-4c2532918731","Type":"ContainerStarted","Data":"7899bd53d183f5387564976f1ea12c44b2507bf70c5152ae31838ce9c19777fc"} Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.530722 4652 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5ksc5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.530764 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.532373 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-286n5" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.576584 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-7qc7b" podStartSLOduration=7.57656828 podStartE2EDuration="7.57656828s" podCreationTimestamp="2025-12-05 05:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:08.558158385 +0000 UTC m=+150.794888652" watchObservedRunningTime="2025-12-05 05:29:08.57656828 +0000 UTC m=+150.813298547" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.582700 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:08 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:08 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:08 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.582745 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe 
failed with statuscode: 500" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.623835 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.632652 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.132618521 +0000 UTC m=+151.369348788 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.717987 4652 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.725243 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.725617 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.225586147 +0000 UTC m=+151.462316415 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.733478 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.733754 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.752348 4652 patch_prober.go:28] interesting pod/apiserver-76f77b778f-hb9pn container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]log ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]etcd ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/generic-apiserver-start-informers ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/max-in-flight-filter ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 05 05:29:08 crc kubenswrapper[4652]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 05 05:29:08 crc kubenswrapper[4652]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/project.openshift.io-projectcache ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/openshift.io-startinformers ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 05 05:29:08 crc kubenswrapper[4652]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 05 05:29:08 crc kubenswrapper[4652]: livez check failed Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.752417 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" podUID="7dac1b70-3636-47c4-be0b-a798cb11a6e7" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.829151 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.829990 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 05:29:09.32997405 +0000 UTC m=+151.566704317 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.929128 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.929904 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.929992 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:08 crc kubenswrapper[4652]: E1205 05:29:08.930327 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.430312212 +0000 UTC m=+151.667042479 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.943826 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb" Dec 05 05:29:08 crc kubenswrapper[4652]: I1205 05:29:08.959677 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.030749 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wq4t\" (UniqueName: \"kubernetes.io/projected/9c118e33-8f43-4a93-9fba-c4562daa381b-kube-api-access-7wq4t\") pod \"9c118e33-8f43-4a93-9fba-c4562daa381b\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.030817 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c118e33-8f43-4a93-9fba-c4562daa381b-config-volume\") pod \"9c118e33-8f43-4a93-9fba-c4562daa381b\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.030949 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c118e33-8f43-4a93-9fba-c4562daa381b-secret-volume\") pod \"9c118e33-8f43-4a93-9fba-c4562daa381b\" (UID: \"9c118e33-8f43-4a93-9fba-c4562daa381b\") " Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.031211 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:09 crc kubenswrapper[4652]: E1205 05:29:09.031612 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.531599321 +0000 UTC m=+151.768329588 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.032482 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c118e33-8f43-4a93-9fba-c4562daa381b-config-volume" (OuterVolumeSpecName: "config-volume") pod "9c118e33-8f43-4a93-9fba-c4562daa381b" (UID: "9c118e33-8f43-4a93-9fba-c4562daa381b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.040873 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c118e33-8f43-4a93-9fba-c4562daa381b-kube-api-access-7wq4t" (OuterVolumeSpecName: "kube-api-access-7wq4t") pod "9c118e33-8f43-4a93-9fba-c4562daa381b" (UID: "9c118e33-8f43-4a93-9fba-c4562daa381b"). InnerVolumeSpecName "kube-api-access-7wq4t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.046018 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c118e33-8f43-4a93-9fba-c4562daa381b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9c118e33-8f43-4a93-9fba-c4562daa381b" (UID: "9c118e33-8f43-4a93-9fba-c4562daa381b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.132246 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:09 crc kubenswrapper[4652]: E1205 05:29:09.132403 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.632371731 +0000 UTC m=+151.869101998 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.132509 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.132641 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wq4t\" (UniqueName: \"kubernetes.io/projected/9c118e33-8f43-4a93-9fba-c4562daa381b-kube-api-access-7wq4t\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.132661 4652 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c118e33-8f43-4a93-9fba-c4562daa381b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.132671 4652 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c118e33-8f43-4a93-9fba-c4562daa381b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:09 crc kubenswrapper[4652]: E1205 05:29:09.132824 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.632815676 +0000 UTC m=+151.869545943 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.234238 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:09 crc kubenswrapper[4652]: E1205 05:29:09.234413 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.73438746 +0000 UTC m=+151.971117727 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.234868 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:09 crc kubenswrapper[4652]: E1205 05:29:09.235221 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:29:09.735194828 +0000 UTC m=+151.971925095 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n2n75" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.259338 4652 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T05:29:08.718206204Z","Handler":null,"Name":""} Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.263006 4652 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.263050 4652 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.336472 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.353860 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.426118 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t95vx" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.437779 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.442607 4652 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.442649 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.522296 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n2n75\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.533626 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.535421 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g" event={"ID":"9c118e33-8f43-4a93-9fba-c4562daa381b","Type":"ContainerDied","Data":"681c1ff55ff05d019d64f221b4632ab23ec6f7d9896240b878e4293d886494ad"}
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.535498 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="681c1ff55ff05d019d64f221b4632ab23ec6f7d9896240b878e4293d886494ad"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.540817 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-26kkh" event={"ID":"2af0519d-fd20-46ec-a6d6-4c2532918731","Type":"ContainerStarted","Data":"3a08825aba66b67ebfbda8f7a3e82635861316d724c1cf5afbcb071da6897d88"}
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.540927 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-26kkh" event={"ID":"2af0519d-fd20-46ec-a6d6-4c2532918731","Type":"ContainerStarted","Data":"d32f1f624c337a4217c0a10ff99bb0ce2df4b9a9123d60d282619efdc6199cfb"}
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.546871 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.553032 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6qfmb"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.565502 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-26kkh" podStartSLOduration=8.565490336 podStartE2EDuration="8.565490336s" podCreationTimestamp="2025-12-05 05:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:09.564625809 +0000 UTC m=+151.801356077" watchObservedRunningTime="2025-12-05 05:29:09.565490336 +0000 UTC m=+151.802220593"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.575584 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-9nh89"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.580850 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 05:29:09 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld
Dec 05 05:29:09 crc kubenswrapper[4652]: [+]process-running ok
Dec 05 05:29:09 crc kubenswrapper[4652]: healthz check failed
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.580887 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.610691 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-77ccc"]
Dec 05 05:29:09 crc kubenswrapper[4652]: E1205 05:29:09.611056 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c118e33-8f43-4a93-9fba-c4562daa381b" containerName="collect-profiles"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.611123 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c118e33-8f43-4a93-9fba-c4562daa381b" containerName="collect-profiles"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.611315 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c118e33-8f43-4a93-9fba-c4562daa381b" containerName="collect-profiles"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.612148 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-77ccc"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.613412 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.614606 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75"
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.650164 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-77ccc"] Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.751161 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k429\" (UniqueName: \"kubernetes.io/projected/fa397388-ad3b-4e68-8be2-a224f603593e-kube-api-access-8k429\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.751239 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-catalog-content\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.751331 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-utilities\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.825925 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fpx88"] Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.827177 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.833943 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.846929 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fpx88"] Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.857061 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k429\" (UniqueName: \"kubernetes.io/projected/fa397388-ad3b-4e68-8be2-a224f603593e-kube-api-access-8k429\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.857112 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-catalog-content\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.857176 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-utilities\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.857600 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-utilities\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.857807 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-catalog-content\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.910681 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k429\" (UniqueName: \"kubernetes.io/projected/fa397388-ad3b-4e68-8be2-a224f603593e-kube-api-access-8k429\") pod \"certified-operators-77ccc\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.958281 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-catalog-content\") pod \"community-operators-fpx88\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.958350 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b2vb\" (UniqueName: \"kubernetes.io/projected/05d92c8c-f3b8-44ac-8657-74e7a6af4507-kube-api-access-7b2vb\") pod \"community-operators-fpx88\" (UID: 
\"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.958444 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-utilities\") pod \"community-operators-fpx88\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:09 crc kubenswrapper[4652]: I1205 05:29:09.973753 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.031861 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xr259"] Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.036690 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.052606 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xr259"] Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.064237 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-utilities\") pod \"community-operators-fpx88\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.064344 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-catalog-content\") pod \"community-operators-fpx88\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.064383 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b2vb\" (UniqueName: \"kubernetes.io/projected/05d92c8c-f3b8-44ac-8657-74e7a6af4507-kube-api-access-7b2vb\") pod \"community-operators-fpx88\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.065413 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-utilities\") pod \"community-operators-fpx88\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.065677 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-catalog-content\") pod \"community-operators-fpx88\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.091169 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b2vb\" (UniqueName: \"kubernetes.io/projected/05d92c8c-f3b8-44ac-8657-74e7a6af4507-kube-api-access-7b2vb\") pod \"community-operators-fpx88\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " 
pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.138532 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.139494 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n2n75"] Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.165145 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-utilities\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.165186 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbl9z\" (UniqueName: \"kubernetes.io/projected/58d637d3-3ed9-4eed-ae8a-e0c619186080-kube-api-access-pbl9z\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.165216 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-catalog-content\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.184378 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.199520 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w9nxc"] Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.205097 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.256582 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w9nxc"] Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.266957 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-catalog-content\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.267029 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-utilities\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.267050 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-utilities\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.267075 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbl9z\" (UniqueName: \"kubernetes.io/projected/58d637d3-3ed9-4eed-ae8a-e0c619186080-kube-api-access-pbl9z\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.267099 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-catalog-content\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.267140 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-687ss\" (UniqueName: \"kubernetes.io/projected/e4426b00-7c51-48ad-9429-3364ec2209ee-kube-api-access-687ss\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.267608 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-utilities\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.267848 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-catalog-content\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.290403 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pbl9z\" (UniqueName: \"kubernetes.io/projected/58d637d3-3ed9-4eed-ae8a-e0c619186080-kube-api-access-pbl9z\") pod \"certified-operators-xr259\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.293625 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-77ccc"] Dec 05 05:29:10 crc kubenswrapper[4652]: W1205 05:29:10.303331 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa397388_ad3b_4e68_8be2_a224f603593e.slice/crio-42c3d7538a17c5488056813e8a19254e25e40acc1b08d4a716b63beb58e45c86 WatchSource:0}: Error finding container 42c3d7538a17c5488056813e8a19254e25e40acc1b08d4a716b63beb58e45c86: Status 404 returned error can't find the container with id 42c3d7538a17c5488056813e8a19254e25e40acc1b08d4a716b63beb58e45c86 Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.369661 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-687ss\" (UniqueName: \"kubernetes.io/projected/e4426b00-7c51-48ad-9429-3364ec2209ee-kube-api-access-687ss\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.369782 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-catalog-content\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.369825 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-utilities\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.370239 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-utilities\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.370568 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-catalog-content\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.372762 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fpx88"] Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.372929 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:10 crc kubenswrapper[4652]: W1205 05:29:10.384657 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05d92c8c_f3b8_44ac_8657_74e7a6af4507.slice/crio-771328d56e2fdbd06d42c4a86520dea00fab94ecb93ad0338c20965217b50131 WatchSource:0}: Error finding container 771328d56e2fdbd06d42c4a86520dea00fab94ecb93ad0338c20965217b50131: Status 404 returned error can't find the container with id 771328d56e2fdbd06d42c4a86520dea00fab94ecb93ad0338c20965217b50131 Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.384741 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-687ss\" (UniqueName: \"kubernetes.io/projected/e4426b00-7c51-48ad-9429-3364ec2209ee-kube-api-access-687ss\") pod \"community-operators-w9nxc\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.521116 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xr259"] Dec 05 05:29:10 crc kubenswrapper[4652]: W1205 05:29:10.529328 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58d637d3_3ed9_4eed_ae8a_e0c619186080.slice/crio-834fe71b4cc4c5f69d04bd00fbd6189535b9e82c8d5deda63837d68f56b9f332 WatchSource:0}: Error finding container 834fe71b4cc4c5f69d04bd00fbd6189535b9e82c8d5deda63837d68f56b9f332: Status 404 returned error can't find the container with id 834fe71b4cc4c5f69d04bd00fbd6189535b9e82c8d5deda63837d68f56b9f332 Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.544002 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.558624 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xr259" event={"ID":"58d637d3-3ed9-4eed-ae8a-e0c619186080","Type":"ContainerStarted","Data":"834fe71b4cc4c5f69d04bd00fbd6189535b9e82c8d5deda63837d68f56b9f332"} Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.562919 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77ccc" event={"ID":"fa397388-ad3b-4e68-8be2-a224f603593e","Type":"ContainerDied","Data":"33e2bc129354078f57bfb2e0fb2af44f1cba849f4a9b71d251a9bc5d21d1da37"} Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.562800 4652 generic.go:334] "Generic (PLEG): container finished" podID="fa397388-ad3b-4e68-8be2-a224f603593e" containerID="33e2bc129354078f57bfb2e0fb2af44f1cba849f4a9b71d251a9bc5d21d1da37" exitCode=0 Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.563175 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77ccc" event={"ID":"fa397388-ad3b-4e68-8be2-a224f603593e","Type":"ContainerStarted","Data":"42c3d7538a17c5488056813e8a19254e25e40acc1b08d4a716b63beb58e45c86"} Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.564813 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.565912 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" event={"ID":"b85664fc-f6d6-49a2-b9a9-22c6b523d5de","Type":"ContainerStarted","Data":"471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b"} Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.565939 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" event={"ID":"b85664fc-f6d6-49a2-b9a9-22c6b523d5de","Type":"ContainerStarted","Data":"c2f9970eb9760de7027bff1f440b450e80a003a45e2e57c57cc5dd3070c17eb7"} Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.566320 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.570982 4652 generic.go:334] "Generic (PLEG): container finished" podID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerID="51489f10d8a546c0499b04ac30fa11d89fb7e1b252e32d1a085f71733bbd4eb1" exitCode=0 Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.571447 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fpx88" event={"ID":"05d92c8c-f3b8-44ac-8657-74e7a6af4507","Type":"ContainerDied","Data":"51489f10d8a546c0499b04ac30fa11d89fb7e1b252e32d1a085f71733bbd4eb1"} Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.571499 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fpx88" event={"ID":"05d92c8c-f3b8-44ac-8657-74e7a6af4507","Type":"ContainerStarted","Data":"771328d56e2fdbd06d42c4a86520dea00fab94ecb93ad0338c20965217b50131"} Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.585290 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 
Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.585368 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.612864 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" podStartSLOduration=128.61284258 podStartE2EDuration="2m8.61284258s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:10.599853061 +0000 UTC m=+152.836583328" watchObservedRunningTime="2025-12-05 05:29:10.61284258 +0000 UTC m=+152.849572837"
Dec 05 05:29:10 crc kubenswrapper[4652]: I1205 05:29:10.710473 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w9nxc"]
Dec 05 05:29:10 crc kubenswrapper[4652]: W1205 05:29:10.772537 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4426b00_7c51_48ad_9429_3364ec2209ee.slice/crio-e25a663708a0d6b7c48d1a44d41364644b84497846a35b8a60b8fd505e7401c0 WatchSource:0}: Error finding container e25a663708a0d6b7c48d1a44d41364644b84497846a35b8a60b8fd505e7401c0: Status 404 returned error can't find the container with id e25a663708a0d6b7c48d1a44d41364644b84497846a35b8a60b8fd505e7401c0
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.258187 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.259233 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.262354 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.262615 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.304885 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.387607 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.387724 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.489013 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.489094 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.489301 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.511320 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.575793 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.580273 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:11 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:11 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:11 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.580398 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.593056 4652 generic.go:334] "Generic (PLEG): container finished" podID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerID="1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075" exitCode=0 Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.593126 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9nxc" event={"ID":"e4426b00-7c51-48ad-9429-3364ec2209ee","Type":"ContainerDied","Data":"1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075"} Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.593157 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9nxc" event={"ID":"e4426b00-7c51-48ad-9429-3364ec2209ee","Type":"ContainerStarted","Data":"e25a663708a0d6b7c48d1a44d41364644b84497846a35b8a60b8fd505e7401c0"} Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.595780 4652 generic.go:334] "Generic (PLEG): container finished" podID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerID="a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5" exitCode=0 Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.596097 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xr259" event={"ID":"58d637d3-3ed9-4eed-ae8a-e0c619186080","Type":"ContainerDied","Data":"a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5"} Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.803307 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-28xkx"] Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.805477 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.806692 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-28xkx"] Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.809480 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.903410 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-utilities\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.903492 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-catalog-content\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.903536 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhm7d\" (UniqueName: \"kubernetes.io/projected/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-kube-api-access-bhm7d\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:11 crc kubenswrapper[4652]: I1205 05:29:11.971599 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.004330 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-catalog-content\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.004375 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhm7d\" (UniqueName: \"kubernetes.io/projected/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-kube-api-access-bhm7d\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.004437 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-utilities\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.004913 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-utilities\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.004907 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-catalog-content\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.019319 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhm7d\" (UniqueName: \"kubernetes.io/projected/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-kube-api-access-bhm7d\") pod \"redhat-marketplace-28xkx\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.122394 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.199979 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2ksbj"] Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.200976 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.207126 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ksbj"] Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.307987 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-utilities\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.308037 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-catalog-content\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.308062 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2n4j\" (UniqueName: \"kubernetes.io/projected/3f656d85-178f-4d95-a477-423db9ba5505-kube-api-access-f2n4j\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.325520 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-28xkx"] Dec 05 05:29:12 crc kubenswrapper[4652]: W1205 05:29:12.331411 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96d794c5_fa0a_4763_97b3_0ea7b0ff45c4.slice/crio-5fab479fe9d181fef7c21c517df43505b2c8a571ccb27a08313d5bc5f2d07490 WatchSource:0}: Error finding container 5fab479fe9d181fef7c21c517df43505b2c8a571ccb27a08313d5bc5f2d07490: Status 404 returned error can't find the container with id 5fab479fe9d181fef7c21c517df43505b2c8a571ccb27a08313d5bc5f2d07490 Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.409499 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-utilities\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.409567 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-catalog-content\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.409595 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2n4j\" (UniqueName: \"kubernetes.io/projected/3f656d85-178f-4d95-a477-423db9ba5505-kube-api-access-f2n4j\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.410774 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-utilities\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.410860 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-catalog-content\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.429242 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2n4j\" (UniqueName: \"kubernetes.io/projected/3f656d85-178f-4d95-a477-423db9ba5505-kube-api-access-f2n4j\") pod \"redhat-marketplace-2ksbj\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.521425 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.581056 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:12 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:12 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:12 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.581100 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.605508 4652 generic.go:334] "Generic (PLEG): container finished" podID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerID="247c1da8c63c4f09cb40ab5d0d47c3def8e958284b5909553f07b38912b1c6be" exitCode=0 Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.605586 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-28xkx" event={"ID":"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4","Type":"ContainerDied","Data":"247c1da8c63c4f09cb40ab5d0d47c3def8e958284b5909553f07b38912b1c6be"} Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.605615 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-28xkx" event={"ID":"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4","Type":"ContainerStarted","Data":"5fab479fe9d181fef7c21c517df43505b2c8a571ccb27a08313d5bc5f2d07490"} Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.608299 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8","Type":"ContainerStarted","Data":"0be587280aa1f0955fe8cac7667ae4ae605004f60bb81e88b19c1542988091ee"} Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.608348 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8","Type":"ContainerStarted","Data":"edc0bdc8eaea3dc055565759614f2a31fef18e1b31abbb3f2ab1dd8154499df8"} Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.643887 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.64386564 podStartE2EDuration="1.64386564s" podCreationTimestamp="2025-12-05 05:29:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:12.642876619 +0000 UTC m=+154.879606886" watchObservedRunningTime="2025-12-05 05:29:12.64386564 +0000 UTC m=+154.880595907" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.798336 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m2vzf"] Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.799292 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.806968 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.822448 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m2vzf"] Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.911588 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ksbj"] Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.920883 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-utilities\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.920923 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-catalog-content\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:12 crc kubenswrapper[4652]: I1205 05:29:12.920996 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7gss\" (UniqueName: \"kubernetes.io/projected/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-kube-api-access-f7gss\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.027898 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7gss\" (UniqueName: \"kubernetes.io/projected/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-kube-api-access-f7gss\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.028054 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-utilities\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.028100 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-catalog-content\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.028670 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-catalog-content\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.028732 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-utilities\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.045517 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7gss\" (UniqueName: \"kubernetes.io/projected/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-kube-api-access-f7gss\") pod \"redhat-operators-m2vzf\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.169924 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.198216 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jk6zn"] Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.199764 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.206596 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jk6zn"] Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.345343 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-utilities\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.345455 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-catalog-content\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.345480 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx7jl\" (UniqueName: \"kubernetes.io/projected/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-kube-api-access-qx7jl\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.446285 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-utilities\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.446355 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-catalog-content\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.446384 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx7jl\" (UniqueName: 
\"kubernetes.io/projected/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-kube-api-access-qx7jl\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.447384 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-catalog-content\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.447387 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-utilities\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.470331 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx7jl\" (UniqueName: \"kubernetes.io/projected/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-kube-api-access-qx7jl\") pod \"redhat-operators-jk6zn\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.526047 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.583374 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:13 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:13 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:13 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.583433 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.627153 4652 generic.go:334] "Generic (PLEG): container finished" podID="9fb3c02a-eaa8-4789-9e1f-24305ae1bff8" containerID="0be587280aa1f0955fe8cac7667ae4ae605004f60bb81e88b19c1542988091ee" exitCode=0 Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.627207 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8","Type":"ContainerDied","Data":"0be587280aa1f0955fe8cac7667ae4ae605004f60bb81e88b19c1542988091ee"} Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.737608 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.742188 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-hb9pn" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.755019 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 
05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.755059 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.761349 4652 patch_prober.go:28] interesting pod/console-f9d7485db-pjnkl container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Dec 05 05:29:13 crc kubenswrapper[4652]: I1205 05:29:13.761389 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-pjnkl" podUID="f4bd4318-0406-40e2-8b50-b79c312bb10a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.158601 4652 patch_prober.go:28] interesting pod/downloads-7954f5f757-7lwtl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.158645 4652 patch_prober.go:28] interesting pod/downloads-7954f5f757-7lwtl container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.158702 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7lwtl" podUID="e26e158c-7bd6-45bd-be06-8cbe6c1d4912" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.158648 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7lwtl" podUID="e26e158c-7bd6-45bd-be06-8cbe6c1d4912" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.424539 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.426895 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.432349 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.441718 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.444822 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.560482 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9ea3899-ffce-4dcf-91ca-d610b627856d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b9ea3899-ffce-4dcf-91ca-d610b627856d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.560586 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9ea3899-ffce-4dcf-91ca-d610b627856d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b9ea3899-ffce-4dcf-91ca-d610b627856d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.577511 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.579644 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:14 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:14 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:14 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.579686 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.669292 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9ea3899-ffce-4dcf-91ca-d610b627856d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b9ea3899-ffce-4dcf-91ca-d610b627856d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.669416 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9ea3899-ffce-4dcf-91ca-d610b627856d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b9ea3899-ffce-4dcf-91ca-d610b627856d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.669498 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9ea3899-ffce-4dcf-91ca-d610b627856d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"b9ea3899-ffce-4dcf-91ca-d610b627856d\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.684917 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9ea3899-ffce-4dcf-91ca-d610b627856d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"b9ea3899-ffce-4dcf-91ca-d610b627856d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:14 crc kubenswrapper[4652]: I1205 05:29:14.760195 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:15 crc kubenswrapper[4652]: I1205 05:29:15.579967 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:15 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:15 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:15 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:15 crc kubenswrapper[4652]: I1205 05:29:15.580028 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:16 crc kubenswrapper[4652]: I1205 05:29:16.580799 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:16 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:16 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:16 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:16 crc kubenswrapper[4652]: I1205 05:29:16.581591 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:16 crc kubenswrapper[4652]: I1205 05:29:16.676134 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-7qc7b" Dec 05 05:29:16 crc kubenswrapper[4652]: I1205 05:29:16.887060 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.013066 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kubelet-dir\") pod \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\" (UID: \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\") " Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.013128 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9fb3c02a-eaa8-4789-9e1f-24305ae1bff8" (UID: "9fb3c02a-eaa8-4789-9e1f-24305ae1bff8"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.013288 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kube-api-access\") pod \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\" (UID: \"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8\") " Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.013822 4652 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.020964 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9fb3c02a-eaa8-4789-9e1f-24305ae1bff8" (UID: "9fb3c02a-eaa8-4789-9e1f-24305ae1bff8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.115646 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fb3c02a-eaa8-4789-9e1f-24305ae1bff8-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.510611 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 05:29:17 crc kubenswrapper[4652]: W1205 05:29:17.517626 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podb9ea3899_ffce_4dcf_91ca_d610b627856d.slice/crio-018b281c5f38c3d6a0f8b29408d5c7c8ad7c8041825f47daf15ee7d92c133722 WatchSource:0}: Error finding container 018b281c5f38c3d6a0f8b29408d5c7c8ad7c8041825f47daf15ee7d92c133722: Status 404 returned error can't find the container with id 018b281c5f38c3d6a0f8b29408d5c7c8ad7c8041825f47daf15ee7d92c133722 Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.580522 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:17 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:17 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:17 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.580623 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.657383 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m2vzf"] Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.685539 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jk6zn"] Dec 05 05:29:17 crc kubenswrapper[4652]: W1205 05:29:17.713680 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod458b1b02_8fd7_4d72_877e_03cd99ee1ae1.slice/crio-73e45491b034d75d35a26a032f09e1a0fdce17dde6bafbc56e945a6bc32fd815 
WatchSource:0}: Error finding container 73e45491b034d75d35a26a032f09e1a0fdce17dde6bafbc56e945a6bc32fd815: Status 404 returned error can't find the container with id 73e45491b034d75d35a26a032f09e1a0fdce17dde6bafbc56e945a6bc32fd815 Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.720743 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9fb3c02a-eaa8-4789-9e1f-24305ae1bff8","Type":"ContainerDied","Data":"edc0bdc8eaea3dc055565759614f2a31fef18e1b31abbb3f2ab1dd8154499df8"} Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.720785 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edc0bdc8eaea3dc055565759614f2a31fef18e1b31abbb3f2ab1dd8154499df8" Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.720864 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.744036 4652 generic.go:334] "Generic (PLEG): container finished" podID="3f656d85-178f-4d95-a477-423db9ba5505" containerID="494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc" exitCode=0 Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.744112 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ksbj" event={"ID":"3f656d85-178f-4d95-a477-423db9ba5505","Type":"ContainerDied","Data":"494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc"} Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.744160 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ksbj" event={"ID":"3f656d85-178f-4d95-a477-423db9ba5505","Type":"ContainerStarted","Data":"6da1056843e03491cfd3de6d972718fc32949f2f90a792f9a167416efa863be6"} Dec 05 05:29:17 crc kubenswrapper[4652]: I1205 05:29:17.756033 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b9ea3899-ffce-4dcf-91ca-d610b627856d","Type":"ContainerStarted","Data":"018b281c5f38c3d6a0f8b29408d5c7c8ad7c8041825f47daf15ee7d92c133722"} Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.585851 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:18 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:18 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:18 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.586782 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.764644 4652 generic.go:334] "Generic (PLEG): container finished" podID="b9ea3899-ffce-4dcf-91ca-d610b627856d" containerID="52bef6199c222f08678cbc0789a7a9e483315dd961dddd94a7e605f6d8c717a2" exitCode=0 Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.764754 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"b9ea3899-ffce-4dcf-91ca-d610b627856d","Type":"ContainerDied","Data":"52bef6199c222f08678cbc0789a7a9e483315dd961dddd94a7e605f6d8c717a2"} Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.767711 4652 generic.go:334] "Generic (PLEG): container finished" podID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerID="4b7920819e88119e430c6d5c4ce429c6f36980b159ab94a3de480e14a8be621d" exitCode=0 Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.768151 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vzf" event={"ID":"458b1b02-8fd7-4d72-877e-03cd99ee1ae1","Type":"ContainerDied","Data":"4b7920819e88119e430c6d5c4ce429c6f36980b159ab94a3de480e14a8be621d"} Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.768171 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vzf" event={"ID":"458b1b02-8fd7-4d72-877e-03cd99ee1ae1","Type":"ContainerStarted","Data":"73e45491b034d75d35a26a032f09e1a0fdce17dde6bafbc56e945a6bc32fd815"} Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.770545 4652 generic.go:334] "Generic (PLEG): container finished" podID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerID="9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938" exitCode=0 Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.770603 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jk6zn" event={"ID":"e280cb1d-0fe0-4aa0-b1c7-1dad19729843","Type":"ContainerDied","Data":"9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938"} Dec 05 05:29:18 crc kubenswrapper[4652]: I1205 05:29:18.770668 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jk6zn" event={"ID":"e280cb1d-0fe0-4aa0-b1c7-1dad19729843","Type":"ContainerStarted","Data":"4ad15c15be2cc358f2db654025d2d3eb0b90d7fb8847279acb77ad60c603a6cb"} Dec 05 05:29:19 crc kubenswrapper[4652]: I1205 05:29:19.579863 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:19 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:19 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:19 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:19 crc kubenswrapper[4652]: I1205 05:29:19.579933 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:20 crc kubenswrapper[4652]: I1205 05:29:20.581533 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:20 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:20 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:20 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:20 crc kubenswrapper[4652]: I1205 05:29:20.586999 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" 
output="HTTP probe failed with statuscode: 500" Dec 05 05:29:21 crc kubenswrapper[4652]: I1205 05:29:21.583494 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:21 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:21 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:21 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:21 crc kubenswrapper[4652]: I1205 05:29:21.583567 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.204045 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.309490 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b9ea3899-ffce-4dcf-91ca-d610b627856d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b9ea3899-ffce-4dcf-91ca-d610b627856d" (UID: "b9ea3899-ffce-4dcf-91ca-d610b627856d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.309576 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9ea3899-ffce-4dcf-91ca-d610b627856d-kubelet-dir\") pod \"b9ea3899-ffce-4dcf-91ca-d610b627856d\" (UID: \"b9ea3899-ffce-4dcf-91ca-d610b627856d\") " Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.309706 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9ea3899-ffce-4dcf-91ca-d610b627856d-kube-api-access\") pod \"b9ea3899-ffce-4dcf-91ca-d610b627856d\" (UID: \"b9ea3899-ffce-4dcf-91ca-d610b627856d\") " Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.310727 4652 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b9ea3899-ffce-4dcf-91ca-d610b627856d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.327244 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ea3899-ffce-4dcf-91ca-d610b627856d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b9ea3899-ffce-4dcf-91ca-d610b627856d" (UID: "b9ea3899-ffce-4dcf-91ca-d610b627856d"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.413269 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9ea3899-ffce-4dcf-91ca-d610b627856d-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.580737 4652 patch_prober.go:28] interesting pod/router-default-5444994796-cgsmk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:29:22 crc kubenswrapper[4652]: [-]has-synced failed: reason withheld Dec 05 05:29:22 crc kubenswrapper[4652]: [+]process-running ok Dec 05 05:29:22 crc kubenswrapper[4652]: healthz check failed Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.580799 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cgsmk" podUID="f03783a0-28f4-48b5-9b19-5c9e3923e3fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.797861 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"b9ea3899-ffce-4dcf-91ca-d610b627856d","Type":"ContainerDied","Data":"018b281c5f38c3d6a0f8b29408d5c7c8ad7c8041825f47daf15ee7d92c133722"} Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.797905 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="018b281c5f38c3d6a0f8b29408d5c7c8ad7c8041825f47daf15ee7d92c133722" Dec 05 05:29:22 crc kubenswrapper[4652]: I1205 05:29:22.797954 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:29:23 crc kubenswrapper[4652]: I1205 05:29:23.580439 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:23 crc kubenswrapper[4652]: I1205 05:29:23.582598 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-cgsmk" Dec 05 05:29:23 crc kubenswrapper[4652]: I1205 05:29:23.732328 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:29:23 crc kubenswrapper[4652]: I1205 05:29:23.742409 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b-metrics-certs\") pod \"network-metrics-daemon-vjg6c\" (UID: \"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b\") " pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:29:23 crc kubenswrapper[4652]: I1205 05:29:23.754881 4652 patch_prober.go:28] interesting pod/console-f9d7485db-pjnkl container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Dec 05 05:29:23 crc kubenswrapper[4652]: I1205 05:29:23.754938 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-pjnkl" podUID="f4bd4318-0406-40e2-8b50-b79c312bb10a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" Dec 05 05:29:23 crc kubenswrapper[4652]: I1205 05:29:23.836139 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vjg6c" Dec 05 05:29:24 crc kubenswrapper[4652]: I1205 05:29:24.179694 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7lwtl" Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.134599 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-vjg6c"] Dec 05 05:29:26 crc kubenswrapper[4652]: W1205 05:29:26.174445 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72e42ce9_3afb_4be1_87c3_8bc1fcb97d9b.slice/crio-d2f2ffd586f21a322de1d45b02c7e021deb40b4da604778960f240b0c978401e WatchSource:0}: Error finding container d2f2ffd586f21a322de1d45b02c7e021deb40b4da604778960f240b0c978401e: Status 404 returned error can't find the container with id d2f2ffd586f21a322de1d45b02c7e021deb40b4da604778960f240b0c978401e Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.824046 4652 generic.go:334] "Generic (PLEG): container finished" podID="3f656d85-178f-4d95-a477-423db9ba5505" containerID="defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98" exitCode=0 Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.824125 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ksbj" event={"ID":"3f656d85-178f-4d95-a477-423db9ba5505","Type":"ContainerDied","Data":"defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.827358 4652 generic.go:334] "Generic (PLEG): container finished" podID="fa397388-ad3b-4e68-8be2-a224f603593e" containerID="51e416e3bf24a793c42a809939cd70b14dd8fa3bb7bb96c32fa5963b47527074" exitCode=0 Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.827438 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77ccc" event={"ID":"fa397388-ad3b-4e68-8be2-a224f603593e","Type":"ContainerDied","Data":"51e416e3bf24a793c42a809939cd70b14dd8fa3bb7bb96c32fa5963b47527074"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.830674 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fpx88" event={"ID":"05d92c8c-f3b8-44ac-8657-74e7a6af4507","Type":"ContainerDied","Data":"11c6ec112a0a3de59e9cc07bd9eb9d7ef7fa3e952e95f2ab989fe1fdfc81bd70"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.831488 4652 generic.go:334] "Generic (PLEG): container finished" podID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerID="11c6ec112a0a3de59e9cc07bd9eb9d7ef7fa3e952e95f2ab989fe1fdfc81bd70" exitCode=0 Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.835245 4652 generic.go:334] "Generic (PLEG): container finished" podID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerID="15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052" exitCode=0 Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.835316 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9nxc" event={"ID":"e4426b00-7c51-48ad-9429-3364ec2209ee","Type":"ContainerDied","Data":"15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.837624 4652 generic.go:334] "Generic (PLEG): container finished" podID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerID="4d96ead2dc25a84f1f3fa60d9a89efe46f0794ed350d902d7691e93a428a1260" exitCode=0 Dec 05 05:29:26 
crc kubenswrapper[4652]: I1205 05:29:26.838096 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-28xkx" event={"ID":"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4","Type":"ContainerDied","Data":"4d96ead2dc25a84f1f3fa60d9a89efe46f0794ed350d902d7691e93a428a1260"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.843771 4652 generic.go:334] "Generic (PLEG): container finished" podID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerID="eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244" exitCode=0 Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.843910 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xr259" event={"ID":"58d637d3-3ed9-4eed-ae8a-e0c619186080","Type":"ContainerDied","Data":"eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.846307 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" event={"ID":"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b","Type":"ContainerStarted","Data":"2714ae82e9cf44b4fdf5bf65041a268573cd4efe02f0bb72d2670a54c162c595"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.846336 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" event={"ID":"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b","Type":"ContainerStarted","Data":"87452c9837fe5a06fa26fa852e154ae180f7145d723f461f407613d98983cc82"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.846353 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vjg6c" event={"ID":"72e42ce9-3afb-4be1-87c3-8bc1fcb97d9b","Type":"ContainerStarted","Data":"d2f2ffd586f21a322de1d45b02c7e021deb40b4da604778960f240b0c978401e"} Dec 05 05:29:26 crc kubenswrapper[4652]: I1205 05:29:26.920017 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-vjg6c" podStartSLOduration=144.919991606 podStartE2EDuration="2m24.919991606s" podCreationTimestamp="2025-12-05 05:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:26.916819787 +0000 UTC m=+169.153550054" watchObservedRunningTime="2025-12-05 05:29:26.919991606 +0000 UTC m=+169.156721872" Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.855229 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fpx88" event={"ID":"05d92c8c-f3b8-44ac-8657-74e7a6af4507","Type":"ContainerStarted","Data":"a4a22945a3877611e2c08799a2b437687c43db7a9e7d5847b99a02aafbf8d902"} Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.857332 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9nxc" event={"ID":"e4426b00-7c51-48ad-9429-3364ec2209ee","Type":"ContainerStarted","Data":"7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f"} Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.860097 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-28xkx" event={"ID":"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4","Type":"ContainerStarted","Data":"0d6b305ac630f378d1758d62fbbbc341c0cd4cdb0fd06eea1cb914ff45812737"} Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.862285 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-xr259" event={"ID":"58d637d3-3ed9-4eed-ae8a-e0c619186080","Type":"ContainerStarted","Data":"799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8"} Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.864286 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ksbj" event={"ID":"3f656d85-178f-4d95-a477-423db9ba5505","Type":"ContainerStarted","Data":"a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61"} Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.866962 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77ccc" event={"ID":"fa397388-ad3b-4e68-8be2-a224f603593e","Type":"ContainerStarted","Data":"5ef5612a4a93d9f1aa77cd5940324c20a5957521b124615a171192eb15fa6f44"} Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.873245 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fpx88" podStartSLOduration=2.081465052 podStartE2EDuration="18.873235957s" podCreationTimestamp="2025-12-05 05:29:09 +0000 UTC" firstStartedPulling="2025-12-05 05:29:10.572310594 +0000 UTC m=+152.809040852" lastFinishedPulling="2025-12-05 05:29:27.36408149 +0000 UTC m=+169.600811757" observedRunningTime="2025-12-05 05:29:27.872315997 +0000 UTC m=+170.109046264" watchObservedRunningTime="2025-12-05 05:29:27.873235957 +0000 UTC m=+170.109966224" Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.888234 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-28xkx" podStartSLOduration=2.192143182 podStartE2EDuration="16.888209909s" podCreationTimestamp="2025-12-05 05:29:11 +0000 UTC" firstStartedPulling="2025-12-05 05:29:12.614672426 +0000 UTC m=+154.851402694" lastFinishedPulling="2025-12-05 05:29:27.310739154 +0000 UTC m=+169.547469421" observedRunningTime="2025-12-05 05:29:27.886146146 +0000 UTC m=+170.122876414" watchObservedRunningTime="2025-12-05 05:29:27.888209909 +0000 UTC m=+170.124940176" Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.899706 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2ksbj" podStartSLOduration=6.336533106 podStartE2EDuration="15.899684556s" podCreationTimestamp="2025-12-05 05:29:12 +0000 UTC" firstStartedPulling="2025-12-05 05:29:17.754388299 +0000 UTC m=+159.991118566" lastFinishedPulling="2025-12-05 05:29:27.317539749 +0000 UTC m=+169.554270016" observedRunningTime="2025-12-05 05:29:27.897153255 +0000 UTC m=+170.133883522" watchObservedRunningTime="2025-12-05 05:29:27.899684556 +0000 UTC m=+170.136414823" Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.916267 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w9nxc" podStartSLOduration=2.120029702 podStartE2EDuration="17.916252498s" podCreationTimestamp="2025-12-05 05:29:10 +0000 UTC" firstStartedPulling="2025-12-05 05:29:11.595901084 +0000 UTC m=+153.832631350" lastFinishedPulling="2025-12-05 05:29:27.392123879 +0000 UTC m=+169.628854146" observedRunningTime="2025-12-05 05:29:27.91084839 +0000 UTC m=+170.147578657" watchObservedRunningTime="2025-12-05 05:29:27.916252498 +0000 UTC m=+170.152982764" Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.928487 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-xr259" podStartSLOduration=2.111941514 podStartE2EDuration="17.928477116s" podCreationTimestamp="2025-12-05 05:29:10 +0000 UTC" firstStartedPulling="2025-12-05 05:29:11.598041872 +0000 UTC m=+153.834772138" lastFinishedPulling="2025-12-05 05:29:27.414577473 +0000 UTC m=+169.651307740" observedRunningTime="2025-12-05 05:29:27.927106638 +0000 UTC m=+170.163836904" watchObservedRunningTime="2025-12-05 05:29:27.928477116 +0000 UTC m=+170.165207383" Dec 05 05:29:27 crc kubenswrapper[4652]: I1205 05:29:27.941381 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-77ccc" podStartSLOduration=2.144625098 podStartE2EDuration="18.941373789s" podCreationTimestamp="2025-12-05 05:29:09 +0000 UTC" firstStartedPulling="2025-12-05 05:29:10.564586912 +0000 UTC m=+152.801317180" lastFinishedPulling="2025-12-05 05:29:27.361335604 +0000 UTC m=+169.598065871" observedRunningTime="2025-12-05 05:29:27.939534269 +0000 UTC m=+170.176264536" watchObservedRunningTime="2025-12-05 05:29:27.941373789 +0000 UTC m=+170.178104057" Dec 05 05:29:29 crc kubenswrapper[4652]: I1205 05:29:29.622933 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:29:29 crc kubenswrapper[4652]: I1205 05:29:29.975129 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:29 crc kubenswrapper[4652]: I1205 05:29:29.975207 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.185839 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.186122 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.323976 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.324655 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.373680 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.374027 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.417357 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.545382 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.545474 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:30 crc kubenswrapper[4652]: I1205 05:29:30.581324 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:32 crc kubenswrapper[4652]: I1205 05:29:32.123533 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:32 crc kubenswrapper[4652]: I1205 05:29:32.124078 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:32 crc kubenswrapper[4652]: I1205 05:29:32.155483 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:32 crc kubenswrapper[4652]: I1205 05:29:32.522196 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:32 crc kubenswrapper[4652]: I1205 05:29:32.522240 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:32 crc kubenswrapper[4652]: I1205 05:29:32.556611 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:32 crc kubenswrapper[4652]: I1205 05:29:32.930875 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:32 crc kubenswrapper[4652]: I1205 05:29:32.932434 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:29:33 crc kubenswrapper[4652]: I1205 05:29:33.248020 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:29:33 crc kubenswrapper[4652]: I1205 05:29:33.759435 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:33 crc kubenswrapper[4652]: I1205 05:29:33.763765 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:29:33 crc kubenswrapper[4652]: I1205 05:29:33.907119 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jk6zn" event={"ID":"e280cb1d-0fe0-4aa0-b1c7-1dad19729843","Type":"ContainerStarted","Data":"e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466"} Dec 05 05:29:33 crc kubenswrapper[4652]: I1205 05:29:33.909148 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vzf" event={"ID":"458b1b02-8fd7-4d72-877e-03cd99ee1ae1","Type":"ContainerStarted","Data":"b90c4b7009ba2a3a254c05ee2d9a81e3028ac45be892135063698a0050988911"} Dec 05 05:29:34 crc kubenswrapper[4652]: I1205 05:29:34.150535 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:29:34 crc kubenswrapper[4652]: I1205 05:29:34.150631 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:29:34 crc 
kubenswrapper[4652]: I1205 05:29:34.395306 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ksbj"] Dec 05 05:29:34 crc kubenswrapper[4652]: I1205 05:29:34.916857 4652 generic.go:334] "Generic (PLEG): container finished" podID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerID="b90c4b7009ba2a3a254c05ee2d9a81e3028ac45be892135063698a0050988911" exitCode=0 Dec 05 05:29:34 crc kubenswrapper[4652]: I1205 05:29:34.916954 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vzf" event={"ID":"458b1b02-8fd7-4d72-877e-03cd99ee1ae1","Type":"ContainerDied","Data":"b90c4b7009ba2a3a254c05ee2d9a81e3028ac45be892135063698a0050988911"} Dec 05 05:29:34 crc kubenswrapper[4652]: I1205 05:29:34.919955 4652 generic.go:334] "Generic (PLEG): container finished" podID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerID="e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466" exitCode=0 Dec 05 05:29:34 crc kubenswrapper[4652]: I1205 05:29:34.920330 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2ksbj" podUID="3f656d85-178f-4d95-a477-423db9ba5505" containerName="registry-server" containerID="cri-o://a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61" gracePeriod=2 Dec 05 05:29:34 crc kubenswrapper[4652]: I1205 05:29:34.920481 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jk6zn" event={"ID":"e280cb1d-0fe0-4aa0-b1c7-1dad19729843","Type":"ContainerDied","Data":"e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466"} Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.324463 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.481540 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-catalog-content\") pod \"3f656d85-178f-4d95-a477-423db9ba5505\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.481763 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-utilities\") pod \"3f656d85-178f-4d95-a477-423db9ba5505\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.481812 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2n4j\" (UniqueName: \"kubernetes.io/projected/3f656d85-178f-4d95-a477-423db9ba5505-kube-api-access-f2n4j\") pod \"3f656d85-178f-4d95-a477-423db9ba5505\" (UID: \"3f656d85-178f-4d95-a477-423db9ba5505\") " Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.482415 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-utilities" (OuterVolumeSpecName: "utilities") pod "3f656d85-178f-4d95-a477-423db9ba5505" (UID: "3f656d85-178f-4d95-a477-423db9ba5505"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.487671 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f656d85-178f-4d95-a477-423db9ba5505-kube-api-access-f2n4j" (OuterVolumeSpecName: "kube-api-access-f2n4j") pod "3f656d85-178f-4d95-a477-423db9ba5505" (UID: "3f656d85-178f-4d95-a477-423db9ba5505"). InnerVolumeSpecName "kube-api-access-f2n4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.510964 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f656d85-178f-4d95-a477-423db9ba5505" (UID: "3f656d85-178f-4d95-a477-423db9ba5505"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.584107 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.584276 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2n4j\" (UniqueName: \"kubernetes.io/projected/3f656d85-178f-4d95-a477-423db9ba5505-kube-api-access-f2n4j\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.584367 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f656d85-178f-4d95-a477-423db9ba5505-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.931508 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vzf" event={"ID":"458b1b02-8fd7-4d72-877e-03cd99ee1ae1","Type":"ContainerStarted","Data":"1890aea170b9cf22bb5c53fa380a44f23441a15db8d068f9d62f83dffff7e884"} Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.934758 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jk6zn" event={"ID":"e280cb1d-0fe0-4aa0-b1c7-1dad19729843","Type":"ContainerStarted","Data":"a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad"} Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.937363 4652 generic.go:334] "Generic (PLEG): container finished" podID="3f656d85-178f-4d95-a477-423db9ba5505" containerID="a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61" exitCode=0 Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.937487 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ksbj" event={"ID":"3f656d85-178f-4d95-a477-423db9ba5505","Type":"ContainerDied","Data":"a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61"} Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.937605 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ksbj" event={"ID":"3f656d85-178f-4d95-a477-423db9ba5505","Type":"ContainerDied","Data":"6da1056843e03491cfd3de6d972718fc32949f2f90a792f9a167416efa863be6"} Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.937690 4652 scope.go:117] "RemoveContainer" containerID="a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.937887 
4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ksbj" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.952376 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m2vzf" podStartSLOduration=7.281942912 podStartE2EDuration="23.952355588s" podCreationTimestamp="2025-12-05 05:29:12 +0000 UTC" firstStartedPulling="2025-12-05 05:29:18.769583184 +0000 UTC m=+161.006313452" lastFinishedPulling="2025-12-05 05:29:35.439995861 +0000 UTC m=+177.676726128" observedRunningTime="2025-12-05 05:29:35.946642669 +0000 UTC m=+178.183372937" watchObservedRunningTime="2025-12-05 05:29:35.952355588 +0000 UTC m=+178.189085845" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.963887 4652 scope.go:117] "RemoveContainer" containerID="defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.973940 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jk6zn" podStartSLOduration=6.249847147 podStartE2EDuration="22.973921581s" podCreationTimestamp="2025-12-05 05:29:13 +0000 UTC" firstStartedPulling="2025-12-05 05:29:18.772509159 +0000 UTC m=+161.009239426" lastFinishedPulling="2025-12-05 05:29:35.496583593 +0000 UTC m=+177.733313860" observedRunningTime="2025-12-05 05:29:35.967149231 +0000 UTC m=+178.203879497" watchObservedRunningTime="2025-12-05 05:29:35.973921581 +0000 UTC m=+178.210651849" Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.977038 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ksbj"] Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.980207 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ksbj"] Dec 05 05:29:35 crc kubenswrapper[4652]: I1205 05:29:35.999075 4652 scope.go:117] "RemoveContainer" containerID="494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc" Dec 05 05:29:36 crc kubenswrapper[4652]: I1205 05:29:36.010451 4652 scope.go:117] "RemoveContainer" containerID="a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61" Dec 05 05:29:36 crc kubenswrapper[4652]: E1205 05:29:36.010763 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61\": container with ID starting with a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61 not found: ID does not exist" containerID="a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61" Dec 05 05:29:36 crc kubenswrapper[4652]: I1205 05:29:36.010792 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61"} err="failed to get container status \"a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61\": rpc error: code = NotFound desc = could not find container \"a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61\": container with ID starting with a90b1c76616c76ffb0db7cdce0ef8f12c73fd13bdd6820460312f393525efc61 not found: ID does not exist" Dec 05 05:29:36 crc kubenswrapper[4652]: I1205 05:29:36.010825 4652 scope.go:117] "RemoveContainer" containerID="defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98" Dec 05 05:29:36 crc kubenswrapper[4652]: 
E1205 05:29:36.011267 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98\": container with ID starting with defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98 not found: ID does not exist" containerID="defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98" Dec 05 05:29:36 crc kubenswrapper[4652]: I1205 05:29:36.011292 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98"} err="failed to get container status \"defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98\": rpc error: code = NotFound desc = could not find container \"defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98\": container with ID starting with defa597bd64b5a7042c9beef1d79d0ee47fbb945c0d444cbe1c64f53ff914e98 not found: ID does not exist" Dec 05 05:29:36 crc kubenswrapper[4652]: I1205 05:29:36.011306 4652 scope.go:117] "RemoveContainer" containerID="494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc" Dec 05 05:29:36 crc kubenswrapper[4652]: E1205 05:29:36.011521 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc\": container with ID starting with 494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc not found: ID does not exist" containerID="494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc" Dec 05 05:29:36 crc kubenswrapper[4652]: I1205 05:29:36.011546 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc"} err="failed to get container status \"494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc\": rpc error: code = NotFound desc = could not find container \"494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc\": container with ID starting with 494cae0d07d7d1cb57c7abea7cb60d15685bed3ba319caefba5da8bccbea88bc not found: ID does not exist" Dec 05 05:29:36 crc kubenswrapper[4652]: I1205 05:29:36.133798 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f656d85-178f-4d95-a477-423db9ba5505" path="/var/lib/kubelet/pods/3f656d85-178f-4d95-a477-423db9ba5505/volumes" Dec 05 05:29:36 crc kubenswrapper[4652]: I1205 05:29:36.549488 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pslqt"] Dec 05 05:29:40 crc kubenswrapper[4652]: I1205 05:29:40.039725 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:29:40 crc kubenswrapper[4652]: I1205 05:29:40.214320 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:29:40 crc kubenswrapper[4652]: I1205 05:29:40.405862 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:40 crc kubenswrapper[4652]: I1205 05:29:40.575530 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.397883 4652 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/certified-operators-xr259"] Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.398310 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xr259" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerName="registry-server" containerID="cri-o://799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8" gracePeriod=2 Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.594885 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w9nxc"] Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.595132 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-w9nxc" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerName="registry-server" containerID="cri-o://7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f" gracePeriod=2 Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.805575 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.976774 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-catalog-content\") pod \"58d637d3-3ed9-4eed-ae8a-e0c619186080\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.976932 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbl9z\" (UniqueName: \"kubernetes.io/projected/58d637d3-3ed9-4eed-ae8a-e0c619186080-kube-api-access-pbl9z\") pod \"58d637d3-3ed9-4eed-ae8a-e0c619186080\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.976962 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-utilities\") pod \"58d637d3-3ed9-4eed-ae8a-e0c619186080\" (UID: \"58d637d3-3ed9-4eed-ae8a-e0c619186080\") " Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.977734 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-utilities" (OuterVolumeSpecName: "utilities") pod "58d637d3-3ed9-4eed-ae8a-e0c619186080" (UID: "58d637d3-3ed9-4eed-ae8a-e0c619186080"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.984702 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58d637d3-3ed9-4eed-ae8a-e0c619186080-kube-api-access-pbl9z" (OuterVolumeSpecName: "kube-api-access-pbl9z") pod "58d637d3-3ed9-4eed-ae8a-e0c619186080" (UID: "58d637d3-3ed9-4eed-ae8a-e0c619186080"). InnerVolumeSpecName "kube-api-access-pbl9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:29:42 crc kubenswrapper[4652]: I1205 05:29:42.994927 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:42.999851 4652 generic.go:334] "Generic (PLEG): container finished" podID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerID="799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8" exitCode=0 Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:42.999907 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xr259" event={"ID":"58d637d3-3ed9-4eed-ae8a-e0c619186080","Type":"ContainerDied","Data":"799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8"} Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:42.999944 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xr259" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:42.999994 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xr259" event={"ID":"58d637d3-3ed9-4eed-ae8a-e0c619186080","Type":"ContainerDied","Data":"834fe71b4cc4c5f69d04bd00fbd6189535b9e82c8d5deda63837d68f56b9f332"} Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.000023 4652 scope.go:117] "RemoveContainer" containerID="799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.003296 4652 generic.go:334] "Generic (PLEG): container finished" podID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerID="7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f" exitCode=0 Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.003336 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9nxc" event={"ID":"e4426b00-7c51-48ad-9429-3364ec2209ee","Type":"ContainerDied","Data":"7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f"} Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.003364 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w9nxc" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.003370 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9nxc" event={"ID":"e4426b00-7c51-48ad-9429-3364ec2209ee","Type":"ContainerDied","Data":"e25a663708a0d6b7c48d1a44d41364644b84497846a35b8a60b8fd505e7401c0"} Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.024318 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58d637d3-3ed9-4eed-ae8a-e0c619186080" (UID: "58d637d3-3ed9-4eed-ae8a-e0c619186080"). InnerVolumeSpecName "catalog-content". 
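[Editorial aside: the "Killing container with a grace period ... gracePeriod=2" records reflect the standard termination contract: the runtime delivers SIGTERM, waits up to the grace period, then sends SIGKILL. A minimal Go sketch of that contract as seen from inside a container; the two-second budget mirrors gracePeriod=2 above, and the shutdown work is a placeholder.]

    package main

    import (
        "fmt"
        "os"
        "os/signal"
        "syscall"
        "time"
    )

    func main() {
        sigs := make(chan os.Signal, 1)
        signal.Notify(sigs, syscall.SIGTERM)
        <-sigs // the runtime sends SIGTERM first

        done := make(chan struct{})
        go func() {
            // flush state, drain connections, deregister, ... (placeholder)
            close(done)
        }()
        select {
        case <-done:
            fmt.Println("clean exit within the grace period")
        case <-time.After(2 * time.Second): // gracePeriod=2 in the records above
            fmt.Println("grace period exhausted; SIGKILL would follow")
        }
    }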
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.024533 4652 scope.go:117] "RemoveContainer" containerID="eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.039751 4652 scope.go:117] "RemoveContainer" containerID="a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.053962 4652 scope.go:117] "RemoveContainer" containerID="799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8" Dec 05 05:29:43 crc kubenswrapper[4652]: E1205 05:29:43.054303 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8\": container with ID starting with 799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8 not found: ID does not exist" containerID="799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.054338 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8"} err="failed to get container status \"799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8\": rpc error: code = NotFound desc = could not find container \"799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8\": container with ID starting with 799d78018062b28c0ab35bb4c8d1b8f0fcb96b996344d920456792cf844703b8 not found: ID does not exist" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.054361 4652 scope.go:117] "RemoveContainer" containerID="eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244" Dec 05 05:29:43 crc kubenswrapper[4652]: E1205 05:29:43.054730 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244\": container with ID starting with eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244 not found: ID does not exist" containerID="eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.054751 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244"} err="failed to get container status \"eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244\": rpc error: code = NotFound desc = could not find container \"eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244\": container with ID starting with eb06fc74d6a768d89db58860d211af150371071aa705381fed8654d8c298a244 not found: ID does not exist" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.054768 4652 scope.go:117] "RemoveContainer" containerID="a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5" Dec 05 05:29:43 crc kubenswrapper[4652]: E1205 05:29:43.055105 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5\": container with ID starting with a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5 not found: ID does not exist" containerID="a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5" Dec 05 05:29:43 crc 
kubenswrapper[4652]: I1205 05:29:43.055127 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5"} err="failed to get container status \"a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5\": rpc error: code = NotFound desc = could not find container \"a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5\": container with ID starting with a581679914e297489e26087dad5a7ff039c0e8c0649eb4c11657cd707ebaaba5 not found: ID does not exist" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.055147 4652 scope.go:117] "RemoveContainer" containerID="7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.065860 4652 scope.go:117] "RemoveContainer" containerID="15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.077826 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-utilities\") pod \"e4426b00-7c51-48ad-9429-3364ec2209ee\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.077880 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-687ss\" (UniqueName: \"kubernetes.io/projected/e4426b00-7c51-48ad-9429-3364ec2209ee-kube-api-access-687ss\") pod \"e4426b00-7c51-48ad-9429-3364ec2209ee\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.077978 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-catalog-content\") pod \"e4426b00-7c51-48ad-9429-3364ec2209ee\" (UID: \"e4426b00-7c51-48ad-9429-3364ec2209ee\") " Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.078363 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.078384 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbl9z\" (UniqueName: \"kubernetes.io/projected/58d637d3-3ed9-4eed-ae8a-e0c619186080-kube-api-access-pbl9z\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.078395 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58d637d3-3ed9-4eed-ae8a-e0c619186080-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.078478 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-utilities" (OuterVolumeSpecName: "utilities") pod "e4426b00-7c51-48ad-9429-3364ec2209ee" (UID: "e4426b00-7c51-48ad-9429-3364ec2209ee"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.080504 4652 scope.go:117] "RemoveContainer" containerID="1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.082961 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4426b00-7c51-48ad-9429-3364ec2209ee-kube-api-access-687ss" (OuterVolumeSpecName: "kube-api-access-687ss") pod "e4426b00-7c51-48ad-9429-3364ec2209ee" (UID: "e4426b00-7c51-48ad-9429-3364ec2209ee"). InnerVolumeSpecName "kube-api-access-687ss". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.093637 4652 scope.go:117] "RemoveContainer" containerID="7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f" Dec 05 05:29:43 crc kubenswrapper[4652]: E1205 05:29:43.094608 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f\": container with ID starting with 7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f not found: ID does not exist" containerID="7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.094647 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f"} err="failed to get container status \"7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f\": rpc error: code = NotFound desc = could not find container \"7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f\": container with ID starting with 7ebbc4c2e0e01bb2245898c1fb30456747c8ef24a75251c7cc6dbdb227944e7f not found: ID does not exist" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.094673 4652 scope.go:117] "RemoveContainer" containerID="15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052" Dec 05 05:29:43 crc kubenswrapper[4652]: E1205 05:29:43.094962 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052\": container with ID starting with 15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052 not found: ID does not exist" containerID="15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.094991 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052"} err="failed to get container status \"15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052\": rpc error: code = NotFound desc = could not find container \"15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052\": container with ID starting with 15d1600cf2205896c0322d907b7af84aa6d397da580411b2b00f21671400d052 not found: ID does not exist" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.095016 4652 scope.go:117] "RemoveContainer" containerID="1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075" Dec 05 05:29:43 crc kubenswrapper[4652]: E1205 05:29:43.095549 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075\": container with ID starting with 1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075 not found: ID does not exist" containerID="1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.095600 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075"} err="failed to get container status \"1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075\": rpc error: code = NotFound desc = could not find container \"1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075\": container with ID starting with 1b753ad46215ca5c1b66e66ed859873470862af3e8cf41d4e7e2ae40a7917075 not found: ID does not exist" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.126068 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4426b00-7c51-48ad-9429-3364ec2209ee" (UID: "e4426b00-7c51-48ad-9429-3364ec2209ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.170696 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.171052 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.181094 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.181147 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-687ss\" (UniqueName: \"kubernetes.io/projected/e4426b00-7c51-48ad-9429-3364ec2209ee-kube-api-access-687ss\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.181164 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4426b00-7c51-48ad-9429-3364ec2209ee-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.208282 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.328354 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xr259"] Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.330746 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xr259"] Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.334861 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w9nxc"] Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.337810 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-w9nxc"] Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.527809 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.527870 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:43 crc kubenswrapper[4652]: I1205 05:29:43.560193 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:44 crc kubenswrapper[4652]: I1205 05:29:44.047119 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:29:44 crc kubenswrapper[4652]: I1205 05:29:44.070927 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:44 crc kubenswrapper[4652]: I1205 05:29:44.137343 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" path="/var/lib/kubelet/pods/58d637d3-3ed9-4eed-ae8a-e0c619186080/volumes" Dec 05 05:29:44 crc kubenswrapper[4652]: I1205 05:29:44.138131 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" path="/var/lib/kubelet/pods/e4426b00-7c51-48ad-9429-3364ec2209ee/volumes" Dec 05 05:29:44 crc kubenswrapper[4652]: I1205 05:29:44.857626 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cgndz" Dec 05 05:29:46 crc kubenswrapper[4652]: I1205 05:29:46.992526 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jk6zn"] Dec 05 05:29:46 crc kubenswrapper[4652]: I1205 05:29:46.994884 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jk6zn" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerName="registry-server" containerID="cri-o://a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad" gracePeriod=2 Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.409287 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.529823 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx7jl\" (UniqueName: \"kubernetes.io/projected/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-kube-api-access-qx7jl\") pod \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.529901 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-catalog-content\") pod \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.529956 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-utilities\") pod \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\" (UID: \"e280cb1d-0fe0-4aa0-b1c7-1dad19729843\") " Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.531570 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-utilities" (OuterVolumeSpecName: "utilities") pod "e280cb1d-0fe0-4aa0-b1c7-1dad19729843" (UID: "e280cb1d-0fe0-4aa0-b1c7-1dad19729843"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.536864 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-kube-api-access-qx7jl" (OuterVolumeSpecName: "kube-api-access-qx7jl") pod "e280cb1d-0fe0-4aa0-b1c7-1dad19729843" (UID: "e280cb1d-0fe0-4aa0-b1c7-1dad19729843"). InnerVolumeSpecName "kube-api-access-qx7jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.606972 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e280cb1d-0fe0-4aa0-b1c7-1dad19729843" (UID: "e280cb1d-0fe0-4aa0-b1c7-1dad19729843"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.631459 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.631489 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx7jl\" (UniqueName: \"kubernetes.io/projected/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-kube-api-access-qx7jl\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:47 crc kubenswrapper[4652]: I1205 05:29:47.631506 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e280cb1d-0fe0-4aa0-b1c7-1dad19729843-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.037208 4652 generic.go:334] "Generic (PLEG): container finished" podID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerID="a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad" exitCode=0 Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.037309 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jk6zn" event={"ID":"e280cb1d-0fe0-4aa0-b1c7-1dad19729843","Type":"ContainerDied","Data":"a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad"} Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.037413 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jk6zn" event={"ID":"e280cb1d-0fe0-4aa0-b1c7-1dad19729843","Type":"ContainerDied","Data":"4ad15c15be2cc358f2db654025d2d3eb0b90d7fb8847279acb77ad60c603a6cb"} Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.037333 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jk6zn" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.037454 4652 scope.go:117] "RemoveContainer" containerID="a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.053426 4652 scope.go:117] "RemoveContainer" containerID="e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.074987 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jk6zn"] Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.079312 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jk6zn"] Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.083326 4652 scope.go:117] "RemoveContainer" containerID="9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.096863 4652 scope.go:117] "RemoveContainer" containerID="a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad" Dec 05 05:29:48 crc kubenswrapper[4652]: E1205 05:29:48.097393 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad\": container with ID starting with a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad not found: ID does not exist" containerID="a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.097442 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad"} err="failed to get container status \"a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad\": rpc error: code = NotFound desc = could not find container \"a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad\": container with ID starting with a33956f3f29fb621bd1225a712e3e185d099691ba0472c6e94a6a0ad944918ad not found: ID does not exist" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.097477 4652 scope.go:117] "RemoveContainer" containerID="e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466" Dec 05 05:29:48 crc kubenswrapper[4652]: E1205 05:29:48.097832 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466\": container with ID starting with e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466 not found: ID does not exist" containerID="e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.097857 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466"} err="failed to get container status \"e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466\": rpc error: code = NotFound desc = could not find container \"e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466\": container with ID starting with e6fe91ec2bb46f4e9de779a211cc9166f731b0faaa8f6fc59eade6041796b466 not found: ID does not exist" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.097876 4652 scope.go:117] "RemoveContainer" 
containerID="9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938" Dec 05 05:29:48 crc kubenswrapper[4652]: E1205 05:29:48.098200 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938\": container with ID starting with 9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938 not found: ID does not exist" containerID="9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.098337 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938"} err="failed to get container status \"9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938\": rpc error: code = NotFound desc = could not find container \"9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938\": container with ID starting with 9025459239e5f6788226643c177accfa9aefeb755a51eaef43906253e4904938 not found: ID does not exist" Dec 05 05:29:48 crc kubenswrapper[4652]: I1205 05:29:48.133104 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" path="/var/lib/kubelet/pods/e280cb1d-0fe0-4aa0-b1c7-1dad19729843/volumes" Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.419310 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420116 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f656d85-178f-4d95-a477-423db9ba5505" containerName="extract-content" Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420137 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f656d85-178f-4d95-a477-423db9ba5505" containerName="extract-content" Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420147 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerName="extract-utilities" Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420153 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerName="extract-utilities" Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420166 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerName="extract-utilities" Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420174 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerName="extract-utilities" Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420182 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerName="registry-server" Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420188 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerName="registry-server" Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420199 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f656d85-178f-4d95-a477-423db9ba5505" containerName="extract-utilities" Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420205 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f656d85-178f-4d95-a477-423db9ba5505" containerName="extract-utilities" 
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420213 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ea3899-ffce-4dcf-91ca-d610b627856d" containerName="pruner"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420219 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ea3899-ffce-4dcf-91ca-d610b627856d" containerName="pruner"
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420240 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerName="extract-content"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420246 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerName="extract-content"
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420255 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fb3c02a-eaa8-4789-9e1f-24305ae1bff8" containerName="pruner"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420262 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fb3c02a-eaa8-4789-9e1f-24305ae1bff8" containerName="pruner"
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420269 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420276 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420288 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerName="extract-utilities"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420295 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerName="extract-utilities"
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420304 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerName="extract-content"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420310 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerName="extract-content"
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420320 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420326 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420336 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerName="extract-content"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420342 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerName="extract-content"
Dec 05 05:29:50 crc kubenswrapper[4652]: E1205 05:29:50.420351 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f656d85-178f-4d95-a477-423db9ba5505" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420357 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f656d85-178f-4d95-a477-423db9ba5505" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420487 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ea3899-ffce-4dcf-91ca-d610b627856d" containerName="pruner"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420526 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f656d85-178f-4d95-a477-423db9ba5505" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420543 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="58d637d3-3ed9-4eed-ae8a-e0c619186080" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420566 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4426b00-7c51-48ad-9429-3364ec2209ee" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420573 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e280cb1d-0fe0-4aa0-b1c7-1dad19729843" containerName="registry-server"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.420581 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fb3c02a-eaa8-4789-9e1f-24305ae1bff8" containerName="pruner"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.421154 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.423544 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.423665 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.439961 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.563734 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c45c765a-8b36-4e78-8b56-e1b7199c5584-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c45c765a-8b36-4e78-8b56-e1b7199c5584\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.563988 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c45c765a-8b36-4e78-8b56-e1b7199c5584-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c45c765a-8b36-4e78-8b56-e1b7199c5584\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.665192 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c45c765a-8b36-4e78-8b56-e1b7199c5584-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c45c765a-8b36-4e78-8b56-e1b7199c5584\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.665259 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c45c765a-8b36-4e78-8b56-e1b7199c5584-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c45c765a-8b36-4e78-8b56-e1b7199c5584\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.665354 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c45c765a-8b36-4e78-8b56-e1b7199c5584-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c45c765a-8b36-4e78-8b56-e1b7199c5584\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
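[Editorial aside: the cpu_manager/state_mem "RemoveStaleState" burst above is the kubelet pruning CPU and memory pinning entries for pods whose UIDs no longer exist. That state is checkpointed on disk; a sketch of inspecting it, assuming the usual /var/lib/kubelet/cpu_manager_state JSON checkpoint. Field names follow my understanding of the kubelet checkpoint format and should be treated as an assumption, not a stable API.]

    package main

    import (
        "encoding/json"
        "fmt"
        "os"
    )

    // cpuManagerState mirrors the kubelet CPU-manager checkpoint as I
    // understand it (assumed field names, checksum omitted).
    type cpuManagerState struct {
        PolicyName    string                       `json:"policyName"`
        DefaultCPUSet string                       `json:"defaultCpuSet"`
        Entries       map[string]map[string]string `json:"entries,omitempty"` // podUID -> container -> cpuset
    }

    func main() {
        raw, err := os.ReadFile("/var/lib/kubelet/cpu_manager_state")
        if err != nil {
            panic(err)
        }
        var st cpuManagerState
        if err := json.Unmarshal(raw, &st); err != nil {
            panic(err)
        }
        // Entries keyed by long-gone pod UIDs are exactly what
        // "RemoveStaleState: removing container" cleans up above.
        fmt.Println(st.PolicyName, st.DefaultCPUSet, len(st.Entries))
    }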
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.689637 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c45c765a-8b36-4e78-8b56-e1b7199c5584-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c45c765a-8b36-4e78-8b56-e1b7199c5584\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:50 crc kubenswrapper[4652]: I1205 05:29:50.735666 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:51 crc kubenswrapper[4652]: I1205 05:29:51.139129 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 05:29:51 crc kubenswrapper[4652]: W1205 05:29:51.150207 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podc45c765a_8b36_4e78_8b56_e1b7199c5584.slice/crio-048ac5ed75215971fcf5b0333df2eb3905fe56f7553f1c40b91afdf1386f7187 WatchSource:0}: Error finding container 048ac5ed75215971fcf5b0333df2eb3905fe56f7553f1c40b91afdf1386f7187: Status 404 returned error can't find the container with id 048ac5ed75215971fcf5b0333df2eb3905fe56f7553f1c40b91afdf1386f7187
Dec 05 05:29:52 crc kubenswrapper[4652]: I1205 05:29:52.064185 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c45c765a-8b36-4e78-8b56-e1b7199c5584","Type":"ContainerStarted","Data":"c5ace6bbd10e2f670491eb6bb2dfac7de823f801ea05335245c3e1a80b2603d0"}
Dec 05 05:29:52 crc kubenswrapper[4652]: I1205 05:29:52.064585 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c45c765a-8b36-4e78-8b56-e1b7199c5584","Type":"ContainerStarted","Data":"048ac5ed75215971fcf5b0333df2eb3905fe56f7553f1c40b91afdf1386f7187"}
Dec 05 05:29:52 crc kubenswrapper[4652]: I1205 05:29:52.079414 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.079380744 podStartE2EDuration="2.079380744s" podCreationTimestamp="2025-12-05 05:29:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:52.078082702 +0000 UTC m=+194.314812969" watchObservedRunningTime="2025-12-05 05:29:52.079380744 +0000 UTC m=+194.316111011"
Dec 05 05:29:53 crc kubenswrapper[4652]: I1205 05:29:53.071801 4652 generic.go:334] "Generic (PLEG): container finished" podID="c45c765a-8b36-4e78-8b56-e1b7199c5584" containerID="c5ace6bbd10e2f670491eb6bb2dfac7de823f801ea05335245c3e1a80b2603d0" exitCode=0
Dec 05 05:29:53 crc kubenswrapper[4652]: I1205 05:29:53.071909 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c45c765a-8b36-4e78-8b56-e1b7199c5584","Type":"ContainerDied","Data":"c5ace6bbd10e2f670491eb6bb2dfac7de823f801ea05335245c3e1a80b2603d0"}
Dec 05 05:29:54 crc kubenswrapper[4652]: I1205 05:29:54.327490 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:54 crc kubenswrapper[4652]: I1205 05:29:54.413231 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c45c765a-8b36-4e78-8b56-e1b7199c5584-kube-api-access\") pod \"c45c765a-8b36-4e78-8b56-e1b7199c5584\" (UID: \"c45c765a-8b36-4e78-8b56-e1b7199c5584\") "
Dec 05 05:29:54 crc kubenswrapper[4652]: I1205 05:29:54.413309 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c45c765a-8b36-4e78-8b56-e1b7199c5584-kubelet-dir\") pod \"c45c765a-8b36-4e78-8b56-e1b7199c5584\" (UID: \"c45c765a-8b36-4e78-8b56-e1b7199c5584\") "
Dec 05 05:29:54 crc kubenswrapper[4652]: I1205 05:29:54.413635 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c45c765a-8b36-4e78-8b56-e1b7199c5584-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c45c765a-8b36-4e78-8b56-e1b7199c5584" (UID: "c45c765a-8b36-4e78-8b56-e1b7199c5584"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 05:29:54 crc kubenswrapper[4652]: I1205 05:29:54.414014 4652 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c45c765a-8b36-4e78-8b56-e1b7199c5584-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 05:29:54 crc kubenswrapper[4652]: I1205 05:29:54.419527 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c45c765a-8b36-4e78-8b56-e1b7199c5584-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c45c765a-8b36-4e78-8b56-e1b7199c5584" (UID: "c45c765a-8b36-4e78-8b56-e1b7199c5584"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:29:54 crc kubenswrapper[4652]: I1205 05:29:54.515716 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c45c765a-8b36-4e78-8b56-e1b7199c5584-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 05:29:55 crc kubenswrapper[4652]: I1205 05:29:55.084492 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c45c765a-8b36-4e78-8b56-e1b7199c5584","Type":"ContainerDied","Data":"048ac5ed75215971fcf5b0333df2eb3905fe56f7553f1c40b91afdf1386f7187"}
Dec 05 05:29:55 crc kubenswrapper[4652]: I1205 05:29:55.084546 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="048ac5ed75215971fcf5b0333df2eb3905fe56f7553f1c40b91afdf1386f7187"
Dec 05 05:29:55 crc kubenswrapper[4652]: I1205 05:29:55.084575 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.614655 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 05:29:57 crc kubenswrapper[4652]: E1205 05:29:57.615837 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c45c765a-8b36-4e78-8b56-e1b7199c5584" containerName="pruner"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.615912 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c45c765a-8b36-4e78-8b56-e1b7199c5584" containerName="pruner"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.616104 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c45c765a-8b36-4e78-8b56-e1b7199c5584" containerName="pruner"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.616688 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.619387 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.619428 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.626820 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.749119 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-var-lock\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.749187 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0f7d739-f09a-454d-a2c7-baed080d73b3-kube-api-access\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.749225 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.850290 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0f7d739-f09a-454d-a2c7-baed080d73b3-kube-api-access\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.850346 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.850402 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-var-lock\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.850481 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-var-lock\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.850541 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.866621 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0f7d739-f09a-454d-a2c7-baed080d73b3-kube-api-access\") pod \"installer-9-crc\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:57 crc kubenswrapper[4652]: I1205 05:29:57.931205 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:29:58 crc kubenswrapper[4652]: I1205 05:29:58.266495 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 05:29:59 crc kubenswrapper[4652]: I1205 05:29:59.107404 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e0f7d739-f09a-454d-a2c7-baed080d73b3","Type":"ContainerStarted","Data":"e19de3c46e6d090adc484e75a729d37e743b189d9fba685b5f684b19b84a08ee"}
Dec 05 05:29:59 crc kubenswrapper[4652]: I1205 05:29:59.107454 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e0f7d739-f09a-454d-a2c7-baed080d73b3","Type":"ContainerStarted","Data":"bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3"}
Dec 05 05:29:59 crc kubenswrapper[4652]: I1205 05:29:59.123387 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.12337176 podStartE2EDuration="2.12337176s" podCreationTimestamp="2025-12-05 05:29:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:29:59.122539695 +0000 UTC m=+201.359269962" watchObservedRunningTime="2025-12-05 05:29:59.12337176 +0000 UTC m=+201.360102027"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.131160 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"]
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.131927 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.133471 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.135054 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.137528 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"]
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.280114 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffce5434-745e-4ed0-ad5f-b20f9ca06950-secret-volume\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.280186 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffce5434-745e-4ed0-ad5f-b20f9ca06950-config-volume\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.280221 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kjnt\" (UniqueName: \"kubernetes.io/projected/ffce5434-745e-4ed0-ad5f-b20f9ca06950-kube-api-access-4kjnt\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.381752 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffce5434-745e-4ed0-ad5f-b20f9ca06950-secret-volume\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.381992 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffce5434-745e-4ed0-ad5f-b20f9ca06950-config-volume\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.382069 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kjnt\" (UniqueName: \"kubernetes.io/projected/ffce5434-745e-4ed0-ad5f-b20f9ca06950-kube-api-access-4kjnt\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"
Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.384132 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffce5434-745e-4ed0-ad5f-b20f9ca06950-config-volume\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"
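[Editorial aside: the VerifyControllerAttachedVolume / MountVolume / MountVolume.SetUp sequence above is the kubelet's volume reconciler bringing the pod's declared volumes to the desired state before the sandbox starts. A sketch of the spec shapes it is acting on for collect-profiles, using k8s.io/api/core/v1: the Secret and ConfigMap volumes come from the pod spec, while the kube-api-access-4kjnt projected volume is injected automatically for the service account, not declared by the author. The secret name here is hypothetical; only the ConfigMap name appears in this log.]

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        pod := corev1.Pod{
            Spec: corev1.PodSpec{
                Volumes: []corev1.Volume{
                    {Name: "secret-volume", VolumeSource: corev1.VolumeSource{
                        // SecretName is a placeholder; not visible in the log.
                        Secret: &corev1.SecretVolumeSource{SecretName: "example-secret"},
                    }},
                    {Name: "config-volume", VolumeSource: corev1.VolumeSource{
                        ConfigMap: &corev1.ConfigMapVolumeSource{
                            // Name taken from the reflector record above.
                            LocalObjectReference: corev1.LocalObjectReference{Name: "collect-profiles-config"},
                        },
                    }},
                },
            },
        }
        fmt.Println(len(pod.Spec.Volumes)) // 2 declared; kube-api-access-* is injected
    }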
\"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.390619 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffce5434-745e-4ed0-ad5f-b20f9ca06950-secret-volume\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.401746 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kjnt\" (UniqueName: \"kubernetes.io/projected/ffce5434-745e-4ed0-ad5f-b20f9ca06950-kube-api-access-4kjnt\") pod \"collect-profiles-29415210-gp9m7\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.446944 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" Dec 05 05:30:00 crc kubenswrapper[4652]: I1205 05:30:00.795413 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"] Dec 05 05:30:00 crc kubenswrapper[4652]: W1205 05:30:00.801923 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podffce5434_745e_4ed0_ad5f_b20f9ca06950.slice/crio-f93bebdb792dbacc6f609691003b9c5680c45055f26977ebd96c53d27f81835c WatchSource:0}: Error finding container f93bebdb792dbacc6f609691003b9c5680c45055f26977ebd96c53d27f81835c: Status 404 returned error can't find the container with id f93bebdb792dbacc6f609691003b9c5680c45055f26977ebd96c53d27f81835c Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.120056 4652 generic.go:334] "Generic (PLEG): container finished" podID="ffce5434-745e-4ed0-ad5f-b20f9ca06950" containerID="d58dc165028e979f5b805b50bf54589625de1daa7c4b46aca75c9a5b58d102cc" exitCode=0 Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.120112 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" event={"ID":"ffce5434-745e-4ed0-ad5f-b20f9ca06950","Type":"ContainerDied","Data":"d58dc165028e979f5b805b50bf54589625de1daa7c4b46aca75c9a5b58d102cc"} Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.120143 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" event={"ID":"ffce5434-745e-4ed0-ad5f-b20f9ca06950","Type":"ContainerStarted","Data":"f93bebdb792dbacc6f609691003b9c5680c45055f26977ebd96c53d27f81835c"} Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.575264 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" podUID="15cb8768-99e0-4907-af4d-0167fff40d3f" containerName="oauth-openshift" containerID="cri-o://7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045" gracePeriod=15 Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.877431 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998426 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-router-certs\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998469 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-session\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998490 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vf45j\" (UniqueName: \"kubernetes.io/projected/15cb8768-99e0-4907-af4d-0167fff40d3f-kube-api-access-vf45j\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998527 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-ocp-branding-template\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998544 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-error\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998577 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-login\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998604 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-idp-0-file-data\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998646 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-provider-selection\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998682 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-trusted-ca-bundle\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: 
\"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998718 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-serving-cert\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998744 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-cliconfig\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998771 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-policies\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998798 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-service-ca\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.998847 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-dir\") pod \"15cb8768-99e0-4907-af4d-0167fff40d3f\" (UID: \"15cb8768-99e0-4907-af4d-0167fff40d3f\") " Dec 05 05:30:01 crc kubenswrapper[4652]: I1205 05:30:01.999170 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.000358 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.000503 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.000689 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.000666 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.005753 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.006066 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15cb8768-99e0-4907-af4d-0167fff40d3f-kube-api-access-vf45j" (OuterVolumeSpecName: "kube-api-access-vf45j") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "kube-api-access-vf45j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.006317 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.006847 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.007408 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.007680 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.007944 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.008252 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.008674 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "15cb8768-99e0-4907-af4d-0167fff40d3f" (UID: "15cb8768-99e0-4907-af4d-0167fff40d3f"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100187 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100217 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100230 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100249 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100259 4652 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100269 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100279 4652 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/15cb8768-99e0-4907-af4d-0167fff40d3f-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100288 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100296 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100304 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vf45j\" (UniqueName: \"kubernetes.io/projected/15cb8768-99e0-4907-af4d-0167fff40d3f-kube-api-access-vf45j\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100312 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100323 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100336 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.100344 4652 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/15cb8768-99e0-4907-af4d-0167fff40d3f-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.126664 4652 generic.go:334] "Generic (PLEG): container finished" podID="15cb8768-99e0-4907-af4d-0167fff40d3f" containerID="7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045" exitCode=0 Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.126854 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.142046 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" event={"ID":"15cb8768-99e0-4907-af4d-0167fff40d3f","Type":"ContainerDied","Data":"7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045"} Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.142081 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pslqt" event={"ID":"15cb8768-99e0-4907-af4d-0167fff40d3f","Type":"ContainerDied","Data":"7825faf9291764667f13c1bb76e72a4d8c6c896d27c6deb7cc96342ce0b028ef"} Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.142101 4652 scope.go:117] "RemoveContainer" containerID="7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.158679 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pslqt"] Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.161589 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pslqt"] Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.166757 4652 scope.go:117] "RemoveContainer" containerID="7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045" Dec 05 05:30:02 crc kubenswrapper[4652]: E1205 05:30:02.167075 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045\": container with ID starting with 7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045 not found: ID does not exist" containerID="7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.167101 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045"} err="failed to get container status \"7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045\": rpc error: code = NotFound desc = could not find container \"7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045\": container with ID starting with 
7fbd6ecc90e3fbf4b266f84393e9739b379585e82b10e69ab867babb96219045 not found: ID does not exist" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.312841 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.404255 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffce5434-745e-4ed0-ad5f-b20f9ca06950-config-volume\") pod \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.404353 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffce5434-745e-4ed0-ad5f-b20f9ca06950-secret-volume\") pod \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.404374 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kjnt\" (UniqueName: \"kubernetes.io/projected/ffce5434-745e-4ed0-ad5f-b20f9ca06950-kube-api-access-4kjnt\") pod \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\" (UID: \"ffce5434-745e-4ed0-ad5f-b20f9ca06950\") " Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.405189 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffce5434-745e-4ed0-ad5f-b20f9ca06950-config-volume" (OuterVolumeSpecName: "config-volume") pod "ffce5434-745e-4ed0-ad5f-b20f9ca06950" (UID: "ffce5434-745e-4ed0-ad5f-b20f9ca06950"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.408690 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffce5434-745e-4ed0-ad5f-b20f9ca06950-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ffce5434-745e-4ed0-ad5f-b20f9ca06950" (UID: "ffce5434-745e-4ed0-ad5f-b20f9ca06950"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.409032 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffce5434-745e-4ed0-ad5f-b20f9ca06950-kube-api-access-4kjnt" (OuterVolumeSpecName: "kube-api-access-4kjnt") pod "ffce5434-745e-4ed0-ad5f-b20f9ca06950" (UID: "ffce5434-745e-4ed0-ad5f-b20f9ca06950"). InnerVolumeSpecName "kube-api-access-4kjnt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.505617 4652 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffce5434-745e-4ed0-ad5f-b20f9ca06950-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.505643 4652 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffce5434-745e-4ed0-ad5f-b20f9ca06950-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:02 crc kubenswrapper[4652]: I1205 05:30:02.505652 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kjnt\" (UniqueName: \"kubernetes.io/projected/ffce5434-745e-4ed0-ad5f-b20f9ca06950-kube-api-access-4kjnt\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:03 crc kubenswrapper[4652]: I1205 05:30:03.136088 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" event={"ID":"ffce5434-745e-4ed0-ad5f-b20f9ca06950","Type":"ContainerDied","Data":"f93bebdb792dbacc6f609691003b9c5680c45055f26977ebd96c53d27f81835c"} Dec 05 05:30:03 crc kubenswrapper[4652]: I1205 05:30:03.136137 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f93bebdb792dbacc6f609691003b9c5680c45055f26977ebd96c53d27f81835c" Dec 05 05:30:03 crc kubenswrapper[4652]: I1205 05:30:03.136154 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7" Dec 05 05:30:04 crc kubenswrapper[4652]: I1205 05:30:04.136330 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15cb8768-99e0-4907-af4d-0167fff40d3f" path="/var/lib/kubelet/pods/15cb8768-99e0-4907-af4d-0167fff40d3f/volumes" Dec 05 05:30:04 crc kubenswrapper[4652]: I1205 05:30:04.150957 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:30:04 crc kubenswrapper[4652]: I1205 05:30:04.151289 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:30:04 crc kubenswrapper[4652]: I1205 05:30:04.151330 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:30:04 crc kubenswrapper[4652]: I1205 05:30:04.151747 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 05:30:04 crc kubenswrapper[4652]: I1205 05:30:04.151811 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" 
containerName="machine-config-daemon" containerID="cri-o://0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa" gracePeriod=600 Dec 05 05:30:05 crc kubenswrapper[4652]: I1205 05:30:05.149184 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa" exitCode=0 Dec 05 05:30:05 crc kubenswrapper[4652]: I1205 05:30:05.149267 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa"} Dec 05 05:30:05 crc kubenswrapper[4652]: I1205 05:30:05.149514 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"4e90520058f578e3fe086566cbaa2b220e39ddf1f77b6c161c8e55a7db53ca5a"} Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.230923 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-cc7989dc6-c697s"] Dec 05 05:30:07 crc kubenswrapper[4652]: E1205 05:30:07.231599 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15cb8768-99e0-4907-af4d-0167fff40d3f" containerName="oauth-openshift" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.231613 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="15cb8768-99e0-4907-af4d-0167fff40d3f" containerName="oauth-openshift" Dec 05 05:30:07 crc kubenswrapper[4652]: E1205 05:30:07.231624 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffce5434-745e-4ed0-ad5f-b20f9ca06950" containerName="collect-profiles" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.231631 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffce5434-745e-4ed0-ad5f-b20f9ca06950" containerName="collect-profiles" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.231731 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="15cb8768-99e0-4907-af4d-0167fff40d3f" containerName="oauth-openshift" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.231744 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffce5434-745e-4ed0-ad5f-b20f9ca06950" containerName="collect-profiles" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.232181 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.234581 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.234578 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.234659 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.234976 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.235400 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.235479 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.235817 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.235837 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.235860 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.236963 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.237095 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.237459 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.242850 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.244892 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-cc7989dc6-c697s"] Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.247901 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.250086 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.354848 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-serving-cert\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " 
pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.354926 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlf2k\" (UniqueName: \"kubernetes.io/projected/b0438f83-afaa-44ef-88dc-76636e1b0837-kube-api-access-wlf2k\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.354957 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355066 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355114 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b0438f83-afaa-44ef-88dc-76636e1b0837-audit-dir\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355354 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355451 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-router-certs\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355486 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-service-ca\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355708 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-audit-policies\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355754 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-login\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355804 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-session\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355898 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355959 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-error\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.355998 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-cliconfig\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457422 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-audit-policies\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457473 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-login\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457508 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" 
(UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-session\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457542 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457585 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-error\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457607 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-cliconfig\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457632 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-serving-cert\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457658 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457682 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlf2k\" (UniqueName: \"kubernetes.io/projected/b0438f83-afaa-44ef-88dc-76636e1b0837-kube-api-access-wlf2k\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457707 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457726 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/b0438f83-afaa-44ef-88dc-76636e1b0837-audit-dir\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457746 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457776 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-router-certs\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.457798 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-service-ca\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.458377 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-audit-policies\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.458448 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b0438f83-afaa-44ef-88dc-76636e1b0837-audit-dir\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.458922 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-cliconfig\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.459353 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-service-ca\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.459709 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.465045 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.465332 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-error\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.465495 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-login\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.465681 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-serving-cert\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.465968 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-router-certs\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.466201 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.466396 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.467087 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b0438f83-afaa-44ef-88dc-76636e1b0837-v4-0-config-system-session\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: 
\"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.472072 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlf2k\" (UniqueName: \"kubernetes.io/projected/b0438f83-afaa-44ef-88dc-76636e1b0837-kube-api-access-wlf2k\") pod \"oauth-openshift-cc7989dc6-c697s\" (UID: \"b0438f83-afaa-44ef-88dc-76636e1b0837\") " pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.545757 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:07 crc kubenswrapper[4652]: I1205 05:30:07.901380 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-cc7989dc6-c697s"] Dec 05 05:30:08 crc kubenswrapper[4652]: I1205 05:30:08.169822 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" event={"ID":"b0438f83-afaa-44ef-88dc-76636e1b0837","Type":"ContainerStarted","Data":"c9a76c67c120308b24f0fecbd7ecd224c2023bfc1f7761a8e3a1b2dafc34f4c4"} Dec 05 05:30:08 crc kubenswrapper[4652]: I1205 05:30:08.170067 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:08 crc kubenswrapper[4652]: I1205 05:30:08.170081 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" event={"ID":"b0438f83-afaa-44ef-88dc-76636e1b0837","Type":"ContainerStarted","Data":"f535c6cfa31bd4942e25b6e01fddb51bf33bdeba82cc224f1253f3ac165ddfce"} Dec 05 05:30:08 crc kubenswrapper[4652]: I1205 05:30:08.185980 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" podStartSLOduration=32.185964956 podStartE2EDuration="32.185964956s" podCreationTimestamp="2025-12-05 05:29:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:30:08.184379944 +0000 UTC m=+210.421110211" watchObservedRunningTime="2025-12-05 05:30:08.185964956 +0000 UTC m=+210.422695223" Dec 05 05:30:08 crc kubenswrapper[4652]: I1205 05:30:08.387807 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-cc7989dc6-c697s" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.099158 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-77ccc"] Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.100291 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-77ccc" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" containerName="registry-server" containerID="cri-o://5ef5612a4a93d9f1aa77cd5940324c20a5957521b124615a171192eb15fa6f44" gracePeriod=30 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.105460 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fpx88"] Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.105697 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fpx88" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerName="registry-server" 
containerID="cri-o://a4a22945a3877611e2c08799a2b437687c43db7a9e7d5847b99a02aafbf8d902" gracePeriod=30 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.116920 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5ksc5"] Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.117100 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" containerName="marketplace-operator" containerID="cri-o://7d0f6a050c33c2ffdbc2c6d056a0e7f630d39652f1da07218a0024fbd8b47d58" gracePeriod=30 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.124857 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mtc94"] Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.125901 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.127737 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-28xkx"] Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.127885 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-28xkx" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerName="registry-server" containerID="cri-o://0d6b305ac630f378d1758d62fbbbc341c0cd4cdb0fd06eea1cb914ff45812737" gracePeriod=30 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.138624 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mtc94"] Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.143589 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m2vzf"] Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.143807 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m2vzf" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerName="registry-server" containerID="cri-o://1890aea170b9cf22bb5c53fa380a44f23441a15db8d068f9d62f83dffff7e884" gracePeriod=30 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.287043 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/befe0b4e-bc8f-4a52-8485-aa822dc69415-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.287382 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/befe0b4e-bc8f-4a52-8485-aa822dc69415-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.287413 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4xb9\" (UniqueName: \"kubernetes.io/projected/befe0b4e-bc8f-4a52-8485-aa822dc69415-kube-api-access-d4xb9\") pod 
\"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.361973 4652 generic.go:334] "Generic (PLEG): container finished" podID="fa397388-ad3b-4e68-8be2-a224f603593e" containerID="5ef5612a4a93d9f1aa77cd5940324c20a5957521b124615a171192eb15fa6f44" exitCode=0 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.362084 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77ccc" event={"ID":"fa397388-ad3b-4e68-8be2-a224f603593e","Type":"ContainerDied","Data":"5ef5612a4a93d9f1aa77cd5940324c20a5957521b124615a171192eb15fa6f44"} Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.366512 4652 generic.go:334] "Generic (PLEG): container finished" podID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerID="1890aea170b9cf22bb5c53fa380a44f23441a15db8d068f9d62f83dffff7e884" exitCode=0 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.366590 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vzf" event={"ID":"458b1b02-8fd7-4d72-877e-03cd99ee1ae1","Type":"ContainerDied","Data":"1890aea170b9cf22bb5c53fa380a44f23441a15db8d068f9d62f83dffff7e884"} Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.368253 4652 generic.go:334] "Generic (PLEG): container finished" podID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerID="a4a22945a3877611e2c08799a2b437687c43db7a9e7d5847b99a02aafbf8d902" exitCode=0 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.368311 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fpx88" event={"ID":"05d92c8c-f3b8-44ac-8657-74e7a6af4507","Type":"ContainerDied","Data":"a4a22945a3877611e2c08799a2b437687c43db7a9e7d5847b99a02aafbf8d902"} Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.369882 4652 generic.go:334] "Generic (PLEG): container finished" podID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerID="0d6b305ac630f378d1758d62fbbbc341c0cd4cdb0fd06eea1cb914ff45812737" exitCode=0 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.369930 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-28xkx" event={"ID":"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4","Type":"ContainerDied","Data":"0d6b305ac630f378d1758d62fbbbc341c0cd4cdb0fd06eea1cb914ff45812737"} Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.371171 4652 generic.go:334] "Generic (PLEG): container finished" podID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" containerID="7d0f6a050c33c2ffdbc2c6d056a0e7f630d39652f1da07218a0024fbd8b47d58" exitCode=0 Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.371201 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" event={"ID":"d8b9d5f4-186a-4646-ab32-0f3c63e23676","Type":"ContainerDied","Data":"7d0f6a050c33c2ffdbc2c6d056a0e7f630d39652f1da07218a0024fbd8b47d58"} Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.388912 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/befe0b4e-bc8f-4a52-8485-aa822dc69415-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.388966 
4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/befe0b4e-bc8f-4a52-8485-aa822dc69415-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.388999 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4xb9\" (UniqueName: \"kubernetes.io/projected/befe0b4e-bc8f-4a52-8485-aa822dc69415-kube-api-access-d4xb9\") pod \"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.391388 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/befe0b4e-bc8f-4a52-8485-aa822dc69415-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.403194 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/befe0b4e-bc8f-4a52-8485-aa822dc69415-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.408244 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4xb9\" (UniqueName: \"kubernetes.io/projected/befe0b4e-bc8f-4a52-8485-aa822dc69415-kube-api-access-d4xb9\") pod \"marketplace-operator-79b997595-mtc94\" (UID: \"befe0b4e-bc8f-4a52-8485-aa822dc69415\") " pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.445050 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.512245 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.579219 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.580906 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.588534 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.593311 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.694065 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-catalog-content\") pod \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.694130 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-utilities\") pod \"fa397388-ad3b-4e68-8be2-a224f603593e\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.694158 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhm7d\" (UniqueName: \"kubernetes.io/projected/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-kube-api-access-bhm7d\") pod \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.694185 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndkhk\" (UniqueName: \"kubernetes.io/projected/d8b9d5f4-186a-4646-ab32-0f3c63e23676-kube-api-access-ndkhk\") pod \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.694213 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k429\" (UniqueName: \"kubernetes.io/projected/fa397388-ad3b-4e68-8be2-a224f603593e-kube-api-access-8k429\") pod \"fa397388-ad3b-4e68-8be2-a224f603593e\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.694236 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-utilities\") pod \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.695720 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-catalog-content\") pod \"fa397388-ad3b-4e68-8be2-a224f603593e\" (UID: \"fa397388-ad3b-4e68-8be2-a224f603593e\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.695025 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-utilities" (OuterVolumeSpecName: "utilities") pod "05d92c8c-f3b8-44ac-8657-74e7a6af4507" (UID: "05d92c8c-f3b8-44ac-8657-74e7a6af4507"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.695038 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-utilities" (OuterVolumeSpecName: "utilities") pod "fa397388-ad3b-4e68-8be2-a224f603593e" (UID: "fa397388-ad3b-4e68-8be2-a224f603593e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.695817 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7gss\" (UniqueName: \"kubernetes.io/projected/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-kube-api-access-f7gss\") pod \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.695884 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-utilities\") pod \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\" (UID: \"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.695911 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-catalog-content\") pod \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.695944 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b2vb\" (UniqueName: \"kubernetes.io/projected/05d92c8c-f3b8-44ac-8657-74e7a6af4507-kube-api-access-7b2vb\") pod \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.695991 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-utilities\") pod \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\" (UID: \"458b1b02-8fd7-4d72-877e-03cd99ee1ae1\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.696015 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-operator-metrics\") pod \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.696045 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-catalog-content\") pod \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\" (UID: \"05d92c8c-f3b8-44ac-8657-74e7a6af4507\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.696076 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-trusted-ca\") pod \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\" (UID: \"d8b9d5f4-186a-4646-ab32-0f3c63e23676\") " Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.696924 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-utilities" (OuterVolumeSpecName: "utilities") pod "458b1b02-8fd7-4d72-877e-03cd99ee1ae1" (UID: "458b1b02-8fd7-4d72-877e-03cd99ee1ae1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.697339 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.697354 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.697383 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.697694 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-utilities" (OuterVolumeSpecName: "utilities") pod "96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" (UID: "96d794c5-fa0a-4763-97b3-0ea7b0ff45c4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.698089 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "d8b9d5f4-186a-4646-ab32-0f3c63e23676" (UID: "d8b9d5f4-186a-4646-ab32-0f3c63e23676"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.700924 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "d8b9d5f4-186a-4646-ab32-0f3c63e23676" (UID: "d8b9d5f4-186a-4646-ab32-0f3c63e23676"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.701354 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05d92c8c-f3b8-44ac-8657-74e7a6af4507-kube-api-access-7b2vb" (OuterVolumeSpecName: "kube-api-access-7b2vb") pod "05d92c8c-f3b8-44ac-8657-74e7a6af4507" (UID: "05d92c8c-f3b8-44ac-8657-74e7a6af4507"). InnerVolumeSpecName "kube-api-access-7b2vb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.701826 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-kube-api-access-f7gss" (OuterVolumeSpecName: "kube-api-access-f7gss") pod "458b1b02-8fd7-4d72-877e-03cd99ee1ae1" (UID: "458b1b02-8fd7-4d72-877e-03cd99ee1ae1"). InnerVolumeSpecName "kube-api-access-f7gss". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.701989 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa397388-ad3b-4e68-8be2-a224f603593e-kube-api-access-8k429" (OuterVolumeSpecName: "kube-api-access-8k429") pod "fa397388-ad3b-4e68-8be2-a224f603593e" (UID: "fa397388-ad3b-4e68-8be2-a224f603593e"). InnerVolumeSpecName "kube-api-access-8k429". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.707019 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8b9d5f4-186a-4646-ab32-0f3c63e23676-kube-api-access-ndkhk" (OuterVolumeSpecName: "kube-api-access-ndkhk") pod "d8b9d5f4-186a-4646-ab32-0f3c63e23676" (UID: "d8b9d5f4-186a-4646-ab32-0f3c63e23676"). InnerVolumeSpecName "kube-api-access-ndkhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.710318 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-kube-api-access-bhm7d" (OuterVolumeSpecName: "kube-api-access-bhm7d") pod "96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" (UID: "96d794c5-fa0a-4763-97b3-0ea7b0ff45c4"). InnerVolumeSpecName "kube-api-access-bhm7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.713934 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" (UID: "96d794c5-fa0a-4763-97b3-0ea7b0ff45c4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.764669 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05d92c8c-f3b8-44ac-8657-74e7a6af4507" (UID: "05d92c8c-f3b8-44ac-8657-74e7a6af4507"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.778186 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa397388-ad3b-4e68-8be2-a224f603593e" (UID: "fa397388-ad3b-4e68-8be2-a224f603593e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798183 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7gss\" (UniqueName: \"kubernetes.io/projected/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-kube-api-access-f7gss\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798210 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798225 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b2vb\" (UniqueName: \"kubernetes.io/projected/05d92c8c-f3b8-44ac-8657-74e7a6af4507-kube-api-access-7b2vb\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798235 4652 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798248 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d92c8c-f3b8-44ac-8657-74e7a6af4507-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798257 4652 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d8b9d5f4-186a-4646-ab32-0f3c63e23676-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798275 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798284 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhm7d\" (UniqueName: \"kubernetes.io/projected/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4-kube-api-access-bhm7d\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798294 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndkhk\" (UniqueName: \"kubernetes.io/projected/d8b9d5f4-186a-4646-ab32-0f3c63e23676-kube-api-access-ndkhk\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798303 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k429\" (UniqueName: \"kubernetes.io/projected/fa397388-ad3b-4e68-8be2-a224f603593e-kube-api-access-8k429\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.798313 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa397388-ad3b-4e68-8be2-a224f603593e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.808384 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "458b1b02-8fd7-4d72-877e-03cd99ee1ae1" (UID: "458b1b02-8fd7-4d72-877e-03cd99ee1ae1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.886862 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mtc94"] Dec 05 05:30:35 crc kubenswrapper[4652]: I1205 05:30:35.899614 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/458b1b02-8fd7-4d72-877e-03cd99ee1ae1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022044 4652 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022366 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022387 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022397 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerName="extract-content" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022405 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerName="extract-content" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022413 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022419 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022426 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerName="extract-content" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022432 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerName="extract-content" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022441 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerName="extract-utilities" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022448 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerName="extract-utilities" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022460 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022467 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022472 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" containerName="extract-utilities" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022477 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" containerName="extract-utilities" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022486 4652 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" containerName="marketplace-operator" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022492 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" containerName="marketplace-operator" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022503 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerName="extract-content" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022509 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerName="extract-content" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022520 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerName="extract-utilities" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022526 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerName="extract-utilities" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022532 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" containerName="extract-content" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022537 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" containerName="extract-content" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022544 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022550 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.022574 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerName="extract-utilities" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022579 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerName="extract-utilities" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022676 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022686 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022694 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022702 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" containerName="marketplace-operator" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.022710 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" containerName="registry-server" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023134 4652 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 05:30:36 crc 
kubenswrapper[4652]: I1205 05:30:36.023255 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023550 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9" gracePeriod=15 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023606 4652 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023631 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59" gracePeriod=15 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023595 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7" gracePeriod=15 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023731 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e" gracePeriod=15 Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.023829 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023852 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.023869 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023877 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.023892 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023898 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.023908 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023916 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 05:30:36 crc kubenswrapper[4652]: 
E1205 05:30:36.023923 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023929 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.023937 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023943 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.023954 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.023960 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.024065 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.024077 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.024086 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.024094 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.024100 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.024107 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.026153 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f" gracePeriod=15 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.035063 4652 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.059141 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.203509 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.204502 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.204605 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.204693 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.204769 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.204893 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.204984 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.205075 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306316 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306370 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306400 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306419 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306454 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306495 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306521 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306633 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306712 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306758 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306735 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306775 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306803 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306829 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306837 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.306985 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.354206 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.378238 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.378385 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" event={"ID":"d8b9d5f4-186a-4646-ab32-0f3c63e23676","Type":"ContainerDied","Data":"d0acb04a8b4e2ee12d423b71eff6675c8c5a8f6fefabca8d8d0d3320598c7224"} Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.378501 4652 scope.go:117] "RemoveContainer" containerID="7d0f6a050c33c2ffdbc2c6d056a0e7f630d39652f1da07218a0024fbd8b47d58" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.379324 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.379801 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.380859 4652 generic.go:334] "Generic (PLEG): container finished" podID="e0f7d739-f09a-454d-a2c7-baed080d73b3" containerID="e19de3c46e6d090adc484e75a729d37e743b189d9fba685b5f684b19b84a08ee" exitCode=0 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.380934 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e0f7d739-f09a-454d-a2c7-baed080d73b3","Type":"ContainerDied","Data":"e19de3c46e6d090adc484e75a729d37e743b189d9fba685b5f684b19b84a08ee"} Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.381514 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.381734 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.381916 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.384050 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-77ccc" event={"ID":"fa397388-ad3b-4e68-8be2-a224f603593e","Type":"ContainerDied","Data":"42c3d7538a17c5488056813e8a19254e25e40acc1b08d4a716b63beb58e45c86"} 
Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.384176 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-77ccc" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.384786 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.385011 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.385297 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.385626 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.385952 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/0.log" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.385993 4652 generic.go:334] "Generic (PLEG): container finished" podID="befe0b4e-bc8f-4a52-8485-aa822dc69415" containerID="e5ea423648088c9d1575861ccddc5203c5639d89d94cbf054e7c9762b6498e43" exitCode=1 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.386233 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" event={"ID":"befe0b4e-bc8f-4a52-8485-aa822dc69415","Type":"ContainerDied","Data":"e5ea423648088c9d1575861ccddc5203c5639d89d94cbf054e7c9762b6498e43"} Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.386349 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" event={"ID":"befe0b4e-bc8f-4a52-8485-aa822dc69415","Type":"ContainerStarted","Data":"2dd3bfde876c4729cb7e17edf667e898a171efd851ce642f4b9222816b365b75"} Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.386326 4652 scope.go:117] "RemoveContainer" containerID="e5ea423648088c9d1575861ccddc5203c5639d89d94cbf054e7c9762b6498e43" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.386589 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 
192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.386830 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.387186 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.387607 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.388185 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: E1205 05:30:36.388279 4652 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events/marketplace-operator-79b997595-mtc94.187e3ab101e66220\": dial tcp 192.168.25.93:6443: connect: connection refused" event="&Event{ObjectMeta:{marketplace-operator-79b997595-mtc94.187e3ab101e66220 openshift-marketplace 29460 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-mtc94,UID:befe0b4e-bc8f-4a52-8485-aa822dc69415,APIVersion:v1,ResourceVersion:29429,FieldPath:spec.containers{marketplace-operator},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:30:35 +0000 UTC,LastTimestamp:2025-12-05 05:30:36.387669502 +0000 UTC m=+238.624399769,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.388923 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m2vzf" event={"ID":"458b1b02-8fd7-4d72-877e-03cd99ee1ae1","Type":"ContainerDied","Data":"73e45491b034d75d35a26a032f09e1a0fdce17dde6bafbc56e945a6bc32fd815"} Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.388956 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m2vzf" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.389785 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.390061 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.390309 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.390652 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.390835 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.391054 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.393167 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fpx88" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.393166 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fpx88" event={"ID":"05d92c8c-f3b8-44ac-8657-74e7a6af4507","Type":"ContainerDied","Data":"771328d56e2fdbd06d42c4a86520dea00fab94ecb93ad0338c20965217b50131"} Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.393783 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.394045 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.394278 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.394592 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.395244 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.395530 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.395733 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.395960 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.396196 4652 
status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.396419 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.396691 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.396973 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.397092 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.397287 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.397510 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.397720 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.397830 4652 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7" exitCode=0 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.397857 4652 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9" exitCode=0 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 
05:30:36.397869 4652 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59" exitCode=0 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.397878 4652 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e" exitCode=2 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.399710 4652 scope.go:117] "RemoveContainer" containerID="5ef5612a4a93d9f1aa77cd5940324c20a5957521b124615a171192eb15fa6f44" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.400476 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-28xkx" event={"ID":"96d794c5-fa0a-4763-97b3-0ea7b0ff45c4","Type":"ContainerDied","Data":"5fab479fe9d181fef7c21c517df43505b2c8a571ccb27a08313d5bc5f2d07490"} Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.400541 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-28xkx" Dec 05 05:30:36 crc kubenswrapper[4652]: W1205 05:30:36.400810 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-e9841a0c7f510854c576afd65b9c3b5d76ce6f566f1701dd3e444c6701dd5ce1 WatchSource:0}: Error finding container e9841a0c7f510854c576afd65b9c3b5d76ce6f566f1701dd3e444c6701dd5ce1: Status 404 returned error can't find the container with id e9841a0c7f510854c576afd65b9c3b5d76ce6f566f1701dd3e444c6701dd5ce1 Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.401051 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.401349 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.401664 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.401994 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.402610 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.402885 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.405065 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.405312 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.405636 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.405892 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.406497 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.406787 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.407025 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.407340 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.407734 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.407984 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.419646 4652 scope.go:117] "RemoveContainer" containerID="51e416e3bf24a793c42a809939cd70b14dd8fa3bb7bb96c32fa5963b47527074" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.450340 4652 scope.go:117] "RemoveContainer" containerID="33e2bc129354078f57bfb2e0fb2af44f1cba849f4a9b71d251a9bc5d21d1da37" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.470258 4652 scope.go:117] "RemoveContainer" containerID="1890aea170b9cf22bb5c53fa380a44f23441a15db8d068f9d62f83dffff7e884" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.485947 4652 scope.go:117] "RemoveContainer" containerID="b90c4b7009ba2a3a254c05ee2d9a81e3028ac45be892135063698a0050988911" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.502243 4652 scope.go:117] "RemoveContainer" containerID="4b7920819e88119e430c6d5c4ce429c6f36980b159ab94a3de480e14a8be621d" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.514599 4652 scope.go:117] "RemoveContainer" containerID="a4a22945a3877611e2c08799a2b437687c43db7a9e7d5847b99a02aafbf8d902" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.539161 4652 scope.go:117] "RemoveContainer" containerID="11c6ec112a0a3de59e9cc07bd9eb9d7ef7fa3e952e95f2ab989fe1fdfc81bd70" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.563012 4652 scope.go:117] "RemoveContainer" containerID="51489f10d8a546c0499b04ac30fa11d89fb7e1b252e32d1a085f71733bbd4eb1" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.583547 4652 scope.go:117] "RemoveContainer" containerID="ea9219695389343bdeb6730edde14a49b950ae21e38dc2f5424fa55fabab2e9c" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.607666 4652 scope.go:117] "RemoveContainer" containerID="0d6b305ac630f378d1758d62fbbbc341c0cd4cdb0fd06eea1cb914ff45812737" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.620724 4652 scope.go:117] "RemoveContainer" containerID="4d96ead2dc25a84f1f3fa60d9a89efe46f0794ed350d902d7691e93a428a1260" Dec 05 05:30:36 crc kubenswrapper[4652]: I1205 05:30:36.633856 4652 scope.go:117] "RemoveContainer" containerID="247c1da8c63c4f09cb40ab5d0d47c3def8e958284b5909553f07b38912b1c6be" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.416642 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.420687 4652 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10"} Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.420746 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e9841a0c7f510854c576afd65b9c3b5d76ce6f566f1701dd3e444c6701dd5ce1"} Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.421503 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.421928 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.422390 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.422725 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.422960 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.423170 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.423369 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.423585 4652 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/1.log" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.423664 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.424018 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/0.log" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.424060 4652 generic.go:334] "Generic (PLEG): container finished" podID="befe0b4e-bc8f-4a52-8485-aa822dc69415" containerID="7e0968472805d6a25494e4edb0c901e8053101653ed4b59c14192f9b3d902746" exitCode=1 Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.424093 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" event={"ID":"befe0b4e-bc8f-4a52-8485-aa822dc69415","Type":"ContainerDied","Data":"7e0968472805d6a25494e4edb0c901e8053101653ed4b59c14192f9b3d902746"} Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.424146 4652 scope.go:117] "RemoveContainer" containerID="e5ea423648088c9d1575861ccddc5203c5639d89d94cbf054e7c9762b6498e43" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.424440 4652 scope.go:117] "RemoveContainer" containerID="7e0968472805d6a25494e4edb0c901e8053101653ed4b59c14192f9b3d902746" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.424531 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: E1205 05:30:37.424650 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-mtc94_openshift-marketplace(befe0b4e-bc8f-4a52-8485-aa822dc69415)\"" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.424788 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.424975 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.425280 4652 status_manager.go:851] 
"Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.425729 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.426011 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.426339 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.426716 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.612804 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.613657 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.614140 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.614574 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.614851 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.615147 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.615458 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.615711 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.615988 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.725152 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-var-lock\") pod 
\"e0f7d739-f09a-454d-a2c7-baed080d73b3\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.725231 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-kubelet-dir\") pod \"e0f7d739-f09a-454d-a2c7-baed080d73b3\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.725258 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0f7d739-f09a-454d-a2c7-baed080d73b3-kube-api-access\") pod \"e0f7d739-f09a-454d-a2c7-baed080d73b3\" (UID: \"e0f7d739-f09a-454d-a2c7-baed080d73b3\") " Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.725279 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-var-lock" (OuterVolumeSpecName: "var-lock") pod "e0f7d739-f09a-454d-a2c7-baed080d73b3" (UID: "e0f7d739-f09a-454d-a2c7-baed080d73b3"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.725353 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e0f7d739-f09a-454d-a2c7-baed080d73b3" (UID: "e0f7d739-f09a-454d-a2c7-baed080d73b3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.725898 4652 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.725926 4652 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e0f7d739-f09a-454d-a2c7-baed080d73b3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.729711 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0f7d739-f09a-454d-a2c7-baed080d73b3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e0f7d739-f09a-454d-a2c7-baed080d73b3" (UID: "e0f7d739-f09a-454d-a2c7-baed080d73b3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:30:37 crc kubenswrapper[4652]: I1205 05:30:37.827040 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0f7d739-f09a-454d-a2c7-baed080d73b3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.129399 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.130760 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.131080 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.132178 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.132421 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.132661 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.132879 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.134503 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" 
Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.431871 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/1.log" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.432335 4652 scope.go:117] "RemoveContainer" containerID="7e0968472805d6a25494e4edb0c901e8053101653ed4b59c14192f9b3d902746" Dec 05 05:30:38 crc kubenswrapper[4652]: E1205 05:30:38.432590 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-mtc94_openshift-marketplace(befe0b4e-bc8f-4a52-8485-aa822dc69415)\"" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.433137 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.433450 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.433814 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.434456 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.435040 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.435328 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.435408 4652 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.435635 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.435960 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.436362 4652 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f" exitCode=0 Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.438290 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e0f7d739-f09a-454d-a2c7-baed080d73b3","Type":"ContainerDied","Data":"bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3"} Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.438317 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.438344 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.441889 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.442632 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.443261 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.443613 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 
05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.443953 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.444253 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.444609 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.444939 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.777130 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.778235 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.778919 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.779432 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.779793 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.780055 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.780364 4652 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.780695 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.780987 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.781203 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.781458 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.939904 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.940256 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.940040 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.940323 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.940358 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.940375 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.940593 4652 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.940610 4652 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:38 crc kubenswrapper[4652]: I1205 05:30:38.940620 4652 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.447302 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.448470 4652 scope.go:117] "RemoveContainer" containerID="335fb1d4eff534b0ea50f47eb50cf4e447143209e3f76536ffa03caa0b8557c7" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.448788 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.464627 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.465755 4652 scope.go:117] "RemoveContainer" containerID="cbdb61db70452526558cc86696199e91016ba7ed988ded34b1a17220d6f6ace9" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.465981 4652 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.467648 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.467986 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.468280 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 
192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.468849 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.469090 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.469625 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.469977 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.477255 4652 scope.go:117] "RemoveContainer" containerID="4ae1735350dfa0ea08ac24478cac2649664de3a49bdb28251775854947587c59" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.491107 4652 scope.go:117] "RemoveContainer" containerID="d3f60434cdd11587ffd251bfc9aa30301aa6191cca52fd2ee9c4f94cc00fd32e" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.502174 4652 scope.go:117] "RemoveContainer" containerID="5378012c9d584c124ee66a3989087faaab8ac495320f4efdaddc14133a8d886f" Dec 05 05:30:39 crc kubenswrapper[4652]: I1205 05:30:39.519766 4652 scope.go:117] "RemoveContainer" containerID="404cc28a244322bb9a6c7cab421756c50fb843d669adf3906318d590ca7d9d6f" Dec 05 05:30:39 crc kubenswrapper[4652]: E1205 05:30:39.801428 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice/crio-bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:30:40 crc kubenswrapper[4652]: I1205 05:30:40.132629 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 05:30:40 crc kubenswrapper[4652]: E1205 05:30:40.825024 4652 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events/marketplace-operator-79b997595-mtc94.187e3ab101e66220\": dial tcp 192.168.25.93:6443: connect: connection refused" event="&Event{ObjectMeta:{marketplace-operator-79b997595-mtc94.187e3ab101e66220 
openshift-marketplace 29460 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-mtc94,UID:befe0b4e-bc8f-4a52-8485-aa822dc69415,APIVersion:v1,ResourceVersion:29429,FieldPath:spec.containers{marketplace-operator},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:30:35 +0000 UTC,LastTimestamp:2025-12-05 05:30:36.387669502 +0000 UTC m=+238.624399769,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 05:30:41 crc kubenswrapper[4652]: E1205 05:30:41.176019 4652 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 192.168.25.93:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" volumeName="registry-storage" Dec 05 05:30:42 crc kubenswrapper[4652]: E1205 05:30:42.390769 4652 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:42 crc kubenswrapper[4652]: E1205 05:30:42.391180 4652 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:42 crc kubenswrapper[4652]: E1205 05:30:42.391604 4652 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:42 crc kubenswrapper[4652]: E1205 05:30:42.391907 4652 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:42 crc kubenswrapper[4652]: E1205 05:30:42.392225 4652 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:42 crc kubenswrapper[4652]: I1205 05:30:42.392257 4652 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 05:30:42 crc kubenswrapper[4652]: E1205 05:30:42.392535 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="200ms" Dec 05 05:30:42 crc kubenswrapper[4652]: E1205 05:30:42.593395 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="400ms" Dec 05 05:30:42 crc kubenswrapper[4652]: E1205 05:30:42.994434 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="800ms" Dec 05 05:30:43 crc kubenswrapper[4652]: E1205 05:30:43.103643 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:30:43Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:30:43Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:30:43Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:30:43Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:43 crc kubenswrapper[4652]: E1205 05:30:43.103980 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:43 crc kubenswrapper[4652]: E1205 05:30:43.104489 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:43 crc kubenswrapper[4652]: E1205 05:30:43.105051 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:43 crc kubenswrapper[4652]: E1205 05:30:43.105358 4652 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:43 crc kubenswrapper[4652]: E1205 05:30:43.105390 4652 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:30:43 crc kubenswrapper[4652]: E1205 05:30:43.796168 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="1.6s" Dec 05 05:30:45 crc kubenswrapper[4652]: E1205 05:30:45.397597 4652 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="3.2s" Dec 05 05:30:45 crc kubenswrapper[4652]: I1205 05:30:45.446291 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:45 crc kubenswrapper[4652]: I1205 05:30:45.446367 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:30:45 crc kubenswrapper[4652]: I1205 05:30:45.446912 4652 scope.go:117] "RemoveContainer" containerID="7e0968472805d6a25494e4edb0c901e8053101653ed4b59c14192f9b3d902746" Dec 05 05:30:45 crc kubenswrapper[4652]: E1205 05:30:45.447160 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-mtc94_openshift-marketplace(befe0b4e-bc8f-4a52-8485-aa822dc69415)\"" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" Dec 05 05:30:48 crc kubenswrapper[4652]: I1205 05:30:48.128323 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:48 crc kubenswrapper[4652]: I1205 05:30:48.129927 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:48 crc kubenswrapper[4652]: I1205 05:30:48.130457 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:48 crc kubenswrapper[4652]: I1205 05:30:48.130822 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:48 crc kubenswrapper[4652]: I1205 05:30:48.131069 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:48 crc kubenswrapper[4652]: I1205 05:30:48.131261 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" 
pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:48 crc kubenswrapper[4652]: I1205 05:30:48.131517 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:48 crc kubenswrapper[4652]: I1205 05:30:48.131859 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused" Dec 05 05:30:48 crc kubenswrapper[4652]: E1205 05:30:48.598474 4652 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.93:6443: connect: connection refused" interval="6.4s" Dec 05 05:30:49 crc kubenswrapper[4652]: E1205 05:30:49.899486 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice/crio-bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.125140 4652 util.go:30] "No sandbox for pod can be found. 
Dec 05 05:30:49 crc kubenswrapper[4652]: E1205 05:30:49.899486 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice/crio-bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.125140 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.126789 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.127038 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.127254 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.127489 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.127701 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.127872 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.128047 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.128243 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.137812 4652 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.137841 4652 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:50 crc kubenswrapper[4652]: E1205 05:30:50.138193 4652 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.138543 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.503569 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.503626 4652 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d" exitCode=1
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.503724 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d"}
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.504100 4652 scope.go:117] "RemoveContainer" containerID="84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.504779 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505086 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505268 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505479 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505639 4652 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="79aa41d3b63a2c4299b4d6810fa05c5ab619992c9b42076f5cb221114e6f594d" exitCode=0
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505667 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"79aa41d3b63a2c4299b4d6810fa05c5ab619992c9b42076f5cb221114e6f594d"}
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505686 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e5d0ff46597f9d3f36018cedbc5be507a654afbd285cde2a89cd680165dcaa41"}
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505711 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505862 4652 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505881 4652 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.505880 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: E1205 05:30:50.506058 4652 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.93:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.506131 4652 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.506359 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.506548 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.506818 4652 status_manager.go:851] "Failed to get status for pod" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" pod="openshift-marketplace/redhat-operators-m2vzf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-m2vzf\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.507037 4652 status_manager.go:851] "Failed to get status for pod" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" pod="openshift-marketplace/marketplace-operator-79b997595-5ksc5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-5ksc5\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.507355 4652 status_manager.go:851] "Failed to get status for pod" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" pod="openshift-marketplace/community-operators-fpx88" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fpx88\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.507771 4652 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.508021 4652 status_manager.go:851] "Failed to get status for pod" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.508235 4652 status_manager.go:851] "Failed to get status for pod" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" pod="openshift-marketplace/certified-operators-77ccc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-77ccc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.508452 4652 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.508665 4652 status_manager.go:851] "Failed to get status for pod" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" pod="openshift-marketplace/redhat-marketplace-28xkx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-28xkx\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:50 crc kubenswrapper[4652]: I1205 05:30:50.508871 4652 status_manager.go:851] "Failed to get status for pod" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-mtc94\": dial tcp 192.168.25.93:6443: connect: connection refused"
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.516782 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7f87f55de5732a41c96d4e37c6451988d883b559fbc64d557ed148e447a318e3"}
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.517253 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"45b7a3ccd5e95fe0e82a860b1ab000cb1e205c98c85f495fcc685c97f2482bae"}
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.517266 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5e8c9c896f5d3997bc4121328be443b6e2d9412c131fb600da52c8f637591566"}
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.517287 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7ffa7a180d3f59d4827fb3a27585387a3744c86378fb033a6feaa5c65a78ec77"}
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.517297 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2b1464a1aa2eafe3c0ddbaf9c744ecbb0c8b1c93ef8371e2308b63e07c9c5770"}
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.517589 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.517647 4652 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.517667 4652 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.522108 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 05 05:30:51 crc kubenswrapper[4652]: I1205 05:30:51.522180 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7308a8f98eeffa06920660c64c1c627bb3f4c2e3cda9f126e592057d3960b825"}
Dec 05 05:30:53 crc kubenswrapper[4652]: I1205 05:30:53.092006 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 05:30:53 crc kubenswrapper[4652]: I1205 05:30:53.723815 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 05:30:53 crc kubenswrapper[4652]: I1205 05:30:53.723967 4652 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 05 05:30:53 crc kubenswrapper[4652]: I1205 05:30:53.724016 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 05 05:30:55 crc kubenswrapper[4652]: I1205 05:30:55.139750 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:55 crc kubenswrapper[4652]: I1205 05:30:55.140844 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:55 crc kubenswrapper[4652]: I1205 05:30:55.144119 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:57 crc kubenswrapper[4652]: I1205 05:30:57.007901 4652 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:57 crc kubenswrapper[4652]: I1205 05:30:57.550908 4652 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:57 crc kubenswrapper[4652]: I1205 05:30:57.550942 4652 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:57 crc kubenswrapper[4652]: I1205 05:30:57.554526 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.132382 4652 scope.go:117] "RemoveContainer" containerID="7e0968472805d6a25494e4edb0c901e8053101653ed4b59c14192f9b3d902746"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.148386 4652 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8e8b0010-0284-4244-afcd-a14fa7ec5e39"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.557451 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/2.log"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.558361 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/1.log"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.558476 4652 generic.go:334] "Generic (PLEG): container finished" podID="befe0b4e-bc8f-4a52-8485-aa822dc69415" containerID="4bfdac0aaabc9dfdd27bb4f24a27f9dc1ef39bd2cd6a2a17931a0469c2f46d87" exitCode=1
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.558613 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" event={"ID":"befe0b4e-bc8f-4a52-8485-aa822dc69415","Type":"ContainerDied","Data":"4bfdac0aaabc9dfdd27bb4f24a27f9dc1ef39bd2cd6a2a17931a0469c2f46d87"}
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.558720 4652 scope.go:117] "RemoveContainer" containerID="7e0968472805d6a25494e4edb0c901e8053101653ed4b59c14192f9b3d902746"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.559120 4652 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.559192 4652 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7692849e-585c-4b9e-8f28-c4b3f677f356"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.559168 4652 scope.go:117] "RemoveContainer" containerID="4bfdac0aaabc9dfdd27bb4f24a27f9dc1ef39bd2cd6a2a17931a0469c2f46d87"
Dec 05 05:30:58 crc kubenswrapper[4652]: E1205 05:30:58.559543 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-mtc94_openshift-marketplace(befe0b4e-bc8f-4a52-8485-aa822dc69415)\"" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415"
Dec 05 05:30:58 crc kubenswrapper[4652]: I1205 05:30:58.565145 4652 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8e8b0010-0284-4244-afcd-a14fa7ec5e39"
Dec 05 05:30:59 crc kubenswrapper[4652]: I1205 05:30:59.564067 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/2.log"
Dec 05 05:30:59 crc kubenswrapper[4652]: E1205 05:30:59.994508 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice/crio-bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 05:31:03 crc kubenswrapper[4652]: I1205 05:31:03.724942 4652 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 05 05:31:03 crc kubenswrapper[4652]: I1205 05:31:03.725351 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 05 05:31:05 crc kubenswrapper[4652]: I1205 05:31:05.445861 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94"
Dec 05 05:31:05 crc kubenswrapper[4652]: I1205 05:31:05.445937 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94"
Dec 05 05:31:05 crc kubenswrapper[4652]: I1205 05:31:05.446519 4652 scope.go:117] "RemoveContainer" containerID="4bfdac0aaabc9dfdd27bb4f24a27f9dc1ef39bd2cd6a2a17931a0469c2f46d87"
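[Editor's note] marketplace-operator-79b997595-mtc94 is now crash-looping, and the restart back-off doubles per failed restart: "back-off 10s" at 05:30:45 becomes "back-off 20s" at 05:30:58 and again at 05:31:05. The kubelet's default schedule grows 10s, 20s, 40s, ... up to a five-minute cap. A hedged Go sketch of that schedule follows; the helper name is ours and the constants are the upstream kubelet defaults, not values read from this cluster.

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns the restart back-off after n consecutive failed
// restarts: 10s, 20s, 40s, ... capped at 5m (the kubelet's defaults).
func crashLoopDelay(n int) time.Duration {
	const base = 10 * time.Second
	const maxDelay = 5 * time.Minute
	d := base
	for i := 1; i < n; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for n := 1; n <= 7; n++ {
		fmt.Printf("restart %d: back-off %v\n", n, crashLoopDelay(n))
	}
}

Restart 1 prints 10s and restart 2 prints 20s, matching the two CrashLoopBackOff messages in this log; the back-off resets once the container runs cleanly for a while.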
Dec 05 05:31:05 crc kubenswrapper[4652]: E1205 05:31:05.446894 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-mtc94_openshift-marketplace(befe0b4e-bc8f-4a52-8485-aa822dc69415)\"" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415"
Dec 05 05:31:07 crc kubenswrapper[4652]: I1205 05:31:07.253398 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 05 05:31:07 crc kubenswrapper[4652]: I1205 05:31:07.407948 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 05 05:31:07 crc kubenswrapper[4652]: I1205 05:31:07.776779 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 05 05:31:08 crc kubenswrapper[4652]: I1205 05:31:08.460782 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 05 05:31:08 crc kubenswrapper[4652]: I1205 05:31:08.715094 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 05 05:31:08 crc kubenswrapper[4652]: I1205 05:31:08.723992 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 05 05:31:08 crc kubenswrapper[4652]: I1205 05:31:08.776765 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 05 05:31:08 crc kubenswrapper[4652]: I1205 05:31:08.896851 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 05 05:31:08 crc kubenswrapper[4652]: I1205 05:31:08.913949 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.061370 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.508430 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.524693 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.569353 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.607887 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.717517 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.845476 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.868605 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.937569 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 05 05:31:09 crc kubenswrapper[4652]: I1205 05:31:09.971984 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.066347 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 05 05:31:10 crc kubenswrapper[4652]: E1205 05:31:10.099450 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice/crio-bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.110012 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.150581 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.196313 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.499724 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.502427 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.581698 4652 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.598028 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.702001 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.779611 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.900907 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.907472 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 05:31:10 crc kubenswrapper[4652]: I1205 05:31:10.922944 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.003332 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.045829 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.097201 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.259638 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.291723 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.296860 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.398467 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.431946 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.456726 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.494627 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.503749 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.613253 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.679455 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.680406 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.680900 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.784301 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.798688 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.859245 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.923224 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.929008 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.945922 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.955526 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 05 05:31:11 crc kubenswrapper[4652]: I1205 05:31:11.976031 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.071658 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.104398 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.120126 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.124100 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.173709 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.228331 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.333078 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.452531 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.484023 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.517644 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.565482 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.644599 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.673736 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.679870 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.698498 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.757581 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.762410 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.823488 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.865665 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.868859 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.894947 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.921678 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.940702 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.959653 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.967852 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 05:31:12 crc kubenswrapper[4652]: I1205 05:31:12.972094 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.042966 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.074851 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.162489 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.193626 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.198845 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.350859 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.351201 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.358307 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
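[Editor's note] The long run of reflector.go:368 "Caches populated" lines is the kubelet's client-go reflectors completing their initial LIST+WATCH for each Secret and ConfigMap referenced by pods on the node, now that the API server is reachable again. The same machinery backs a shared informer; a minimal, generic client-go sketch follows (it assumes an in-cluster config and is ordinary client-go usage, not kubelet-internal code).

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes we run inside the cluster
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// A shared informer factory runs one reflector (LIST+WATCH) per type;
	// the "Caches populated" log lines correspond to this initial sync.
	factory := informers.NewSharedInformerFactory(client, 10*time.Minute)
	cmInformer := factory.Core().V1().ConfigMaps().Informer()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)

	if !cache.WaitForCacheSync(stop, cmInformer.HasSynced) {
		panic("cache never synced")
	}
	fmt.Println("ConfigMap cache populated")
}

Until WaitForCacheSync returns, reads from the informer's store can be stale or empty, which is why the kubelet logs each cache as it becomes populated.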
05:31:13.365538 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.455889 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.499143 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.529705 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.605914 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.654435 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.692845 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.723841 4652 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.723906 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.723972 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.724602 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"7308a8f98eeffa06920660c64c1c627bb3f4c2e3cda9f126e592057d3960b825"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.724708 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://7308a8f98eeffa06920660c64c1c627bb3f4c2e3cda9f126e592057d3960b825" gracePeriod=30 Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.748332 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.814223 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.884746 4652 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"service-ca" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.970909 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 05:31:13 crc kubenswrapper[4652]: I1205 05:31:13.994341 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.018169 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.070871 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.073834 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.153642 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.287036 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.383842 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.438969 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.473583 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.480386 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.521026 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.565528 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.576773 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.584643 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.590926 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.787004 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.788696 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.814418 4652 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.823196 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.828869 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 05:31:14 crc kubenswrapper[4652]: I1205 05:31:14.932742 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.008444 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.023439 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.028441 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.035472 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.052771 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.082610 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.126977 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.138265 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.209890 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.246346 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.256102 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.394734 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.411039 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.453904 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.496178 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.588073 4652 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.679278 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.776209 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 05:31:15 crc kubenswrapper[4652]: I1205 05:31:15.829996 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.032065 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.098882 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.104945 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.173483 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.242577 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.252969 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.338037 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.394331 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.396723 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.399432 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.453707 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.600283 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.649756 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.757740 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.793250 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.831950 4652 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.909276 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 05:31:16 crc kubenswrapper[4652]: I1205 05:31:16.926607 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.009799 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.051587 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.065629 4652 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.069349 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=41.069330085 podStartE2EDuration="41.069330085s" podCreationTimestamp="2025-12-05 05:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:30:57.07122056 +0000 UTC m=+259.307950827" watchObservedRunningTime="2025-12-05 05:31:17.069330085 +0000 UTC m=+279.306060352" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.070775 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fpx88","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/redhat-operators-m2vzf","openshift-marketplace/marketplace-operator-79b997595-5ksc5","openshift-marketplace/redhat-marketplace-28xkx","openshift-marketplace/certified-operators-77ccc"] Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.070866 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.076129 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.093166 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=20.093142391 podStartE2EDuration="20.093142391s" podCreationTimestamp="2025-12-05 05:30:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:31:17.086893759 +0000 UTC m=+279.323624026" watchObservedRunningTime="2025-12-05 05:31:17.093142391 +0000 UTC m=+279.329872658" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.125459 4652 scope.go:117] "RemoveContainer" containerID="4bfdac0aaabc9dfdd27bb4f24a27f9dc1ef39bd2cd6a2a17931a0469c2f46d87" Dec 05 05:31:17 crc kubenswrapper[4652]: E1205 05:31:17.125728 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-mtc94_openshift-marketplace(befe0b4e-bc8f-4a52-8485-aa822dc69415)\"" 
pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" podUID="befe0b4e-bc8f-4a52-8485-aa822dc69415" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.229074 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.290810 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.364109 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.414874 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.417498 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.453095 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.472488 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.480293 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.499760 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.515432 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.767699 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.864391 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.917185 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 05:31:17 crc kubenswrapper[4652]: I1205 05:31:17.939081 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.020667 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.041663 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.041761 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.045600 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 
05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.126980 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.131181 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05d92c8c-f3b8-44ac-8657-74e7a6af4507" path="/var/lib/kubelet/pods/05d92c8c-f3b8-44ac-8657-74e7a6af4507/volumes" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.131769 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="458b1b02-8fd7-4d72-877e-03cd99ee1ae1" path="/var/lib/kubelet/pods/458b1b02-8fd7-4d72-877e-03cd99ee1ae1/volumes" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.132310 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96d794c5-fa0a-4763-97b3-0ea7b0ff45c4" path="/var/lib/kubelet/pods/96d794c5-fa0a-4763-97b3-0ea7b0ff45c4/volumes" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.133337 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8b9d5f4-186a-4646-ab32-0f3c63e23676" path="/var/lib/kubelet/pods/d8b9d5f4-186a-4646-ab32-0f3c63e23676/volumes" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.133768 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa397388-ad3b-4e68-8be2-a224f603593e" path="/var/lib/kubelet/pods/fa397388-ad3b-4e68-8be2-a224f603593e/volumes" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.163803 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.213790 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.268317 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.342267 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.348373 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.494860 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.568747 4652 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.596069 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.605123 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.646750 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 05:31:18 crc kubenswrapper[4652]: I1205 05:31:18.782692 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 05:31:18 crc 
kubenswrapper[4652]: I1205 05:31:18.995727 4652 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.150760 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.169655 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.260802 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.420931 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.439490 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.485409 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.501430 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.559051 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.629373 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.631425 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.673139 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.684660 4652 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.684880 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10" gracePeriod=5 Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.708747 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.711374 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 05:31:19 crc kubenswrapper[4652]: I1205 05:31:19.809524 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.000835 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 
05:31:20.034174 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.061806 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.209161 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.216567 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 05:31:20 crc kubenswrapper[4652]: E1205 05:31:20.221288 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice/crio-bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.564493 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.572723 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.606148 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.691898 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.730007 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.759065 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.779706 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 05:31:20 crc kubenswrapper[4652]: I1205 05:31:20.932586 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.054697 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.074242 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.302270 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.342262 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.447241 4652 reflector.go:368] 
Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.477382 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.512439 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.749492 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.795855 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.828103 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.913892 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 05:31:21 crc kubenswrapper[4652]: I1205 05:31:21.951932 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.184258 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.195879 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.606172 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.619788 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.658837 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.739095 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.772292 4652 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.786055 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.885230 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 05:31:22 crc kubenswrapper[4652]: I1205 05:31:22.954473 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 05:31:23 crc kubenswrapper[4652]: I1205 05:31:23.144379 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 05:31:23 crc kubenswrapper[4652]: I1205 05:31:23.218045 4652 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 05:31:23 crc kubenswrapper[4652]: I1205 05:31:23.347910 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 05:31:23 crc kubenswrapper[4652]: I1205 05:31:23.671738 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 05:31:23 crc kubenswrapper[4652]: I1205 05:31:23.818253 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 05:31:23 crc kubenswrapper[4652]: I1205 05:31:23.897290 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 05:31:24 crc kubenswrapper[4652]: I1205 05:31:24.824671 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.243008 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.243086 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.405721 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.405798 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.405917 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.405986 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.406029 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.406192 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.406403 4652 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.406481 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.406524 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.406545 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.415953 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.508694 4652 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.508728 4652 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.508738 4652 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.508748 4652 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.715906 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.716179 4652 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10" exitCode=137 Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.716235 4652 scope.go:117] "RemoveContainer" containerID="c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.716393 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.744675 4652 scope.go:117] "RemoveContainer" containerID="c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10" Dec 05 05:31:25 crc kubenswrapper[4652]: E1205 05:31:25.745185 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10\": container with ID starting with c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10 not found: ID does not exist" containerID="c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10" Dec 05 05:31:25 crc kubenswrapper[4652]: I1205 05:31:25.747113 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10"} err="failed to get container status \"c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10\": rpc error: code = NotFound desc = could not find container \"c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10\": container with ID starting with c16390d0d069201ef93990d83182c5118d5f832ddb509a57f416ba1e01b76c10 not found: ID does not exist" Dec 05 05:31:26 crc kubenswrapper[4652]: I1205 05:31:26.132076 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 05:31:26 crc kubenswrapper[4652]: I1205 05:31:26.132342 4652 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 05 05:31:26 crc kubenswrapper[4652]: I1205 05:31:26.143059 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:31:26 crc kubenswrapper[4652]: I1205 05:31:26.143105 4652 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="e8d6492a-9124-4e7f-9db7-af7a7fd7eae2" Dec 05 05:31:26 crc kubenswrapper[4652]: I1205 05:31:26.145499 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:31:26 crc kubenswrapper[4652]: I1205 05:31:26.145530 4652 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="e8d6492a-9124-4e7f-9db7-af7a7fd7eae2" Dec 05 05:31:30 crc kubenswrapper[4652]: I1205 05:31:30.125930 4652 scope.go:117] "RemoveContainer" containerID="4bfdac0aaabc9dfdd27bb4f24a27f9dc1ef39bd2cd6a2a17931a0469c2f46d87" Dec 05 05:31:30 crc kubenswrapper[4652]: E1205 05:31:30.322780 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice/crio-bab27affc49a026cd83a990a04f8416615d614a2d3d0ec982e1a75d5d8a976b3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-pode0f7d739_f09a_454d_a2c7_baed080d73b3.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:31:30 crc kubenswrapper[4652]: I1205 05:31:30.744045 4652 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/2.log" Dec 05 05:31:30 crc kubenswrapper[4652]: I1205 05:31:30.744121 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" event={"ID":"befe0b4e-bc8f-4a52-8485-aa822dc69415","Type":"ContainerStarted","Data":"f84233c1e98ec8b226a33b7ade7c2b86dbb67416e68cd00efcbe31a8526c7f00"} Dec 05 05:31:30 crc kubenswrapper[4652]: I1205 05:31:30.744482 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:31:30 crc kubenswrapper[4652]: I1205 05:31:30.752596 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" Dec 05 05:31:30 crc kubenswrapper[4652]: I1205 05:31:30.765518 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mtc94" podStartSLOduration=55.765494683 podStartE2EDuration="55.765494683s" podCreationTimestamp="2025-12-05 05:30:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:31:30.762259238 +0000 UTC m=+292.998989505" watchObservedRunningTime="2025-12-05 05:31:30.765494683 +0000 UTC m=+293.002224950" Dec 05 05:31:43 crc kubenswrapper[4652]: I1205 05:31:43.813894 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 05:31:43 crc kubenswrapper[4652]: I1205 05:31:43.816011 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 05:31:43 crc kubenswrapper[4652]: I1205 05:31:43.816131 4652 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="7308a8f98eeffa06920660c64c1c627bb3f4c2e3cda9f126e592057d3960b825" exitCode=137 Dec 05 05:31:43 crc kubenswrapper[4652]: I1205 05:31:43.816171 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"7308a8f98eeffa06920660c64c1c627bb3f4c2e3cda9f126e592057d3960b825"} Dec 05 05:31:43 crc kubenswrapper[4652]: I1205 05:31:43.816219 4652 scope.go:117] "RemoveContainer" containerID="84629f98cc087535bffc3ea77e6f68628d321155438f7e01f6c363707b440a7d" Dec 05 05:31:44 crc kubenswrapper[4652]: I1205 05:31:44.824962 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 05:31:44 crc kubenswrapper[4652]: I1205 05:31:44.826215 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9290347db73d1418388e123791c652f0b59ab040e85f440ba10af1d589b7da40"} Dec 05 05:31:48 crc kubenswrapper[4652]: I1205 05:31:48.987232 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-84w6t"] Dec 05 05:31:48 crc kubenswrapper[4652]: E1205 05:31:48.987713 
4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 05:31:48 crc kubenswrapper[4652]: I1205 05:31:48.987727 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 05:31:48 crc kubenswrapper[4652]: E1205 05:31:48.987741 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" containerName="installer" Dec 05 05:31:48 crc kubenswrapper[4652]: I1205 05:31:48.987747 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" containerName="installer" Dec 05 05:31:48 crc kubenswrapper[4652]: I1205 05:31:48.987834 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 05:31:48 crc kubenswrapper[4652]: I1205 05:31:48.987850 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0f7d739-f09a-454d-a2c7-baed080d73b3" containerName="installer" Dec 05 05:31:48 crc kubenswrapper[4652]: I1205 05:31:48.988508 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:48 crc kubenswrapper[4652]: I1205 05:31:48.993099 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.035191 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-84w6t"] Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.161056 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6bd24fa-eaf3-430a-bfae-7230e901b63d-catalog-content\") pod \"redhat-operators-84w6t\" (UID: \"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.161156 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fncbt\" (UniqueName: \"kubernetes.io/projected/c6bd24fa-eaf3-430a-bfae-7230e901b63d-kube-api-access-fncbt\") pod \"redhat-operators-84w6t\" (UID: \"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.161194 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6bd24fa-eaf3-430a-bfae-7230e901b63d-utilities\") pod \"redhat-operators-84w6t\" (UID: \"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.170682 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xz9mv"] Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.172483 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.174224 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.190351 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xz9mv"] Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.263001 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c1ac30-0a51-4501-94ce-53183bf948cf-catalog-content\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.263098 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6bd24fa-eaf3-430a-bfae-7230e901b63d-catalog-content\") pod \"redhat-operators-84w6t\" (UID: \"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.263160 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j48m8\" (UniqueName: \"kubernetes.io/projected/91c1ac30-0a51-4501-94ce-53183bf948cf-kube-api-access-j48m8\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.263194 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fncbt\" (UniqueName: \"kubernetes.io/projected/c6bd24fa-eaf3-430a-bfae-7230e901b63d-kube-api-access-fncbt\") pod \"redhat-operators-84w6t\" (UID: \"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.263232 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6bd24fa-eaf3-430a-bfae-7230e901b63d-utilities\") pod \"redhat-operators-84w6t\" (UID: \"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.263252 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c1ac30-0a51-4501-94ce-53183bf948cf-utilities\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.263648 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6bd24fa-eaf3-430a-bfae-7230e901b63d-catalog-content\") pod \"redhat-operators-84w6t\" (UID: \"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.263977 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6bd24fa-eaf3-430a-bfae-7230e901b63d-utilities\") pod \"redhat-operators-84w6t\" (UID: 
\"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.281395 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fncbt\" (UniqueName: \"kubernetes.io/projected/c6bd24fa-eaf3-430a-bfae-7230e901b63d-kube-api-access-fncbt\") pod \"redhat-operators-84w6t\" (UID: \"c6bd24fa-eaf3-430a-bfae-7230e901b63d\") " pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.299735 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.365374 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c1ac30-0a51-4501-94ce-53183bf948cf-catalog-content\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.365645 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j48m8\" (UniqueName: \"kubernetes.io/projected/91c1ac30-0a51-4501-94ce-53183bf948cf-kube-api-access-j48m8\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.365685 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c1ac30-0a51-4501-94ce-53183bf948cf-utilities\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.365745 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91c1ac30-0a51-4501-94ce-53183bf948cf-catalog-content\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.366026 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91c1ac30-0a51-4501-94ce-53183bf948cf-utilities\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.384224 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j48m8\" (UniqueName: \"kubernetes.io/projected/91c1ac30-0a51-4501-94ce-53183bf948cf-kube-api-access-j48m8\") pod \"redhat-marketplace-xz9mv\" (UID: \"91c1ac30-0a51-4501-94ce-53183bf948cf\") " pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.487902 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.651454 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xz9mv"] Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.656270 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-84w6t"] Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.856876 4652 generic.go:334] "Generic (PLEG): container finished" podID="c6bd24fa-eaf3-430a-bfae-7230e901b63d" containerID="5f4f4c9e3559c7217c66563b600a2cbb2609bce8a2a142a35e01e5c12c7552b2" exitCode=0 Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.856955 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84w6t" event={"ID":"c6bd24fa-eaf3-430a-bfae-7230e901b63d","Type":"ContainerDied","Data":"5f4f4c9e3559c7217c66563b600a2cbb2609bce8a2a142a35e01e5c12c7552b2"} Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.857140 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84w6t" event={"ID":"c6bd24fa-eaf3-430a-bfae-7230e901b63d","Type":"ContainerStarted","Data":"daa5798d62ef4913c1945766ae4bedcbc6506687d6c2756cf1004d6d2c24c3be"} Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.859417 4652 generic.go:334] "Generic (PLEG): container finished" podID="91c1ac30-0a51-4501-94ce-53183bf948cf" containerID="06cfc187c6ec05f7d2e2cf0d2092574bb5a93c7a5dcccca6e8f6ecd2f61f4c9c" exitCode=0 Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.859479 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xz9mv" event={"ID":"91c1ac30-0a51-4501-94ce-53183bf948cf","Type":"ContainerDied","Data":"06cfc187c6ec05f7d2e2cf0d2092574bb5a93c7a5dcccca6e8f6ecd2f61f4c9c"} Dec 05 05:31:49 crc kubenswrapper[4652]: I1205 05:31:49.859517 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xz9mv" event={"ID":"91c1ac30-0a51-4501-94ce-53183bf948cf","Type":"ContainerStarted","Data":"929d96ec68d8071e0dfa33a0ef0e784f1a64f05ea34271a40e12314b8453dd47"} Dec 05 05:31:50 crc kubenswrapper[4652]: I1205 05:31:50.867146 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84w6t" event={"ID":"c6bd24fa-eaf3-430a-bfae-7230e901b63d","Type":"ContainerStarted","Data":"db0fd41c4940d5cf8d8c9ec52cea28b6f8978df9949f13e62083024d8683d058"} Dec 05 05:31:50 crc kubenswrapper[4652]: I1205 05:31:50.870836 4652 generic.go:334] "Generic (PLEG): container finished" podID="91c1ac30-0a51-4501-94ce-53183bf948cf" containerID="83e66ab46a63bb8f2316e6c286676e062a51bee22683cfd40d03b185528cbc6c" exitCode=0 Dec 05 05:31:50 crc kubenswrapper[4652]: I1205 05:31:50.870914 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xz9mv" event={"ID":"91c1ac30-0a51-4501-94ce-53183bf948cf","Type":"ContainerDied","Data":"83e66ab46a63bb8f2316e6c286676e062a51bee22683cfd40d03b185528cbc6c"} Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.371548 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-txw9m"] Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.372966 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.375644 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.383367 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-txw9m"] Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.400293 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/712da180-0e95-46d1-ae94-66811f03cf96-catalog-content\") pod \"certified-operators-txw9m\" (UID: \"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.400355 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/712da180-0e95-46d1-ae94-66811f03cf96-utilities\") pod \"certified-operators-txw9m\" (UID: \"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.400411 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pplgx\" (UniqueName: \"kubernetes.io/projected/712da180-0e95-46d1-ae94-66811f03cf96-kube-api-access-pplgx\") pod \"certified-operators-txw9m\" (UID: \"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.501490 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pplgx\" (UniqueName: \"kubernetes.io/projected/712da180-0e95-46d1-ae94-66811f03cf96-kube-api-access-pplgx\") pod \"certified-operators-txw9m\" (UID: \"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.501571 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/712da180-0e95-46d1-ae94-66811f03cf96-catalog-content\") pod \"certified-operators-txw9m\" (UID: \"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.501604 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/712da180-0e95-46d1-ae94-66811f03cf96-utilities\") pod \"certified-operators-txw9m\" (UID: \"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.502000 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/712da180-0e95-46d1-ae94-66811f03cf96-catalog-content\") pod \"certified-operators-txw9m\" (UID: \"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.502044 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/712da180-0e95-46d1-ae94-66811f03cf96-utilities\") pod \"certified-operators-txw9m\" (UID: 
\"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.517264 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pplgx\" (UniqueName: \"kubernetes.io/projected/712da180-0e95-46d1-ae94-66811f03cf96-kube-api-access-pplgx\") pod \"certified-operators-txw9m\" (UID: \"712da180-0e95-46d1-ae94-66811f03cf96\") " pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.567188 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-skn8m"] Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.568135 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.570702 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.575586 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-skn8m"] Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.602518 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwzzj\" (UniqueName: \"kubernetes.io/projected/f27dfcad-0744-4e4b-afd8-602951a7e2a7-kube-api-access-qwzzj\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.602595 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-catalog-content\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.602649 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-utilities\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.689371 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.703762 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwzzj\" (UniqueName: \"kubernetes.io/projected/f27dfcad-0744-4e4b-afd8-602951a7e2a7-kube-api-access-qwzzj\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.703866 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-catalog-content\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.703912 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-utilities\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.704497 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-catalog-content\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.704542 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-utilities\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.719918 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwzzj\" (UniqueName: \"kubernetes.io/projected/f27dfcad-0744-4e4b-afd8-602951a7e2a7-kube-api-access-qwzzj\") pod \"community-operators-skn8m\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.852107 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-txw9m"] Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.879714 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.886260 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xz9mv" event={"ID":"91c1ac30-0a51-4501-94ce-53183bf948cf","Type":"ContainerStarted","Data":"69d57d64db0cccf44cf3e951b4e7e1648bc4cd606ad6687f9b77714519fbec0b"} Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.889934 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txw9m" event={"ID":"712da180-0e95-46d1-ae94-66811f03cf96","Type":"ContainerStarted","Data":"f79af9e4481b84185f41c2af1ca564173eb765890b0ce161fcd79a4b852399a7"} Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.892427 4652 generic.go:334] "Generic (PLEG): container finished" podID="c6bd24fa-eaf3-430a-bfae-7230e901b63d" containerID="db0fd41c4940d5cf8d8c9ec52cea28b6f8978df9949f13e62083024d8683d058" exitCode=0 Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.892455 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84w6t" event={"ID":"c6bd24fa-eaf3-430a-bfae-7230e901b63d","Type":"ContainerDied","Data":"db0fd41c4940d5cf8d8c9ec52cea28b6f8978df9949f13e62083024d8683d058"} Dec 05 05:31:51 crc kubenswrapper[4652]: I1205 05:31:51.904712 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xz9mv" podStartSLOduration=1.394608614 podStartE2EDuration="2.904693876s" podCreationTimestamp="2025-12-05 05:31:49 +0000 UTC" firstStartedPulling="2025-12-05 05:31:49.860850559 +0000 UTC m=+312.097580826" lastFinishedPulling="2025-12-05 05:31:51.370935821 +0000 UTC m=+313.607666088" observedRunningTime="2025-12-05 05:31:51.901178235 +0000 UTC m=+314.137908501" watchObservedRunningTime="2025-12-05 05:31:51.904693876 +0000 UTC m=+314.141424143" Dec 05 05:31:52 crc kubenswrapper[4652]: I1205 05:31:52.045463 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-skn8m"] Dec 05 05:31:52 crc kubenswrapper[4652]: W1205 05:31:52.055147 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf27dfcad_0744_4e4b_afd8_602951a7e2a7.slice/crio-ccfec55b3f09d8af36e230b82fb3ca58bc7fe354f594bb3c74c3a26fba689f6a WatchSource:0}: Error finding container ccfec55b3f09d8af36e230b82fb3ca58bc7fe354f594bb3c74c3a26fba689f6a: Status 404 returned error can't find the container with id ccfec55b3f09d8af36e230b82fb3ca58bc7fe354f594bb3c74c3a26fba689f6a Dec 05 05:31:52 crc kubenswrapper[4652]: I1205 05:31:52.901787 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84w6t" event={"ID":"c6bd24fa-eaf3-430a-bfae-7230e901b63d","Type":"ContainerStarted","Data":"2f557cfdb78410738d410f2678c7cbe3066f4c48d739bfdc9cb61fc900ba76dd"} Dec 05 05:31:52 crc kubenswrapper[4652]: I1205 05:31:52.903702 4652 generic.go:334] "Generic (PLEG): container finished" podID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerID="1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a" exitCode=0 Dec 05 05:31:52 crc kubenswrapper[4652]: I1205 05:31:52.903761 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skn8m" event={"ID":"f27dfcad-0744-4e4b-afd8-602951a7e2a7","Type":"ContainerDied","Data":"1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a"} Dec 05 
05:31:52 crc kubenswrapper[4652]: I1205 05:31:52.903833 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skn8m" event={"ID":"f27dfcad-0744-4e4b-afd8-602951a7e2a7","Type":"ContainerStarted","Data":"ccfec55b3f09d8af36e230b82fb3ca58bc7fe354f594bb3c74c3a26fba689f6a"} Dec 05 05:31:52 crc kubenswrapper[4652]: I1205 05:31:52.905888 4652 generic.go:334] "Generic (PLEG): container finished" podID="712da180-0e95-46d1-ae94-66811f03cf96" containerID="e3ae03e3c4459e8ca04b1e5852a432732e149520880f23e4960a24ff37db50ff" exitCode=0 Dec 05 05:31:52 crc kubenswrapper[4652]: I1205 05:31:52.905921 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txw9m" event={"ID":"712da180-0e95-46d1-ae94-66811f03cf96","Type":"ContainerDied","Data":"e3ae03e3c4459e8ca04b1e5852a432732e149520880f23e4960a24ff37db50ff"} Dec 05 05:31:52 crc kubenswrapper[4652]: I1205 05:31:52.917778 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-84w6t" podStartSLOduration=2.2718601019999998 podStartE2EDuration="4.917759338s" podCreationTimestamp="2025-12-05 05:31:48 +0000 UTC" firstStartedPulling="2025-12-05 05:31:49.85877187 +0000 UTC m=+312.095502137" lastFinishedPulling="2025-12-05 05:31:52.504671106 +0000 UTC m=+314.741401373" observedRunningTime="2025-12-05 05:31:52.916811365 +0000 UTC m=+315.153541632" watchObservedRunningTime="2025-12-05 05:31:52.917759338 +0000 UTC m=+315.154489605" Dec 05 05:31:53 crc kubenswrapper[4652]: I1205 05:31:53.092649 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:31:53 crc kubenswrapper[4652]: I1205 05:31:53.723694 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:31:53 crc kubenswrapper[4652]: I1205 05:31:53.727755 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:31:53 crc kubenswrapper[4652]: I1205 05:31:53.914353 4652 generic.go:334] "Generic (PLEG): container finished" podID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerID="935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb" exitCode=0 Dec 05 05:31:53 crc kubenswrapper[4652]: I1205 05:31:53.914423 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skn8m" event={"ID":"f27dfcad-0744-4e4b-afd8-602951a7e2a7","Type":"ContainerDied","Data":"935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb"} Dec 05 05:31:53 crc kubenswrapper[4652]: I1205 05:31:53.916921 4652 generic.go:334] "Generic (PLEG): container finished" podID="712da180-0e95-46d1-ae94-66811f03cf96" containerID="500a20f367c0275330b38a1b98118b2ad9c3fbf4cf5bfb50a8f9d96033100130" exitCode=0 Dec 05 05:31:53 crc kubenswrapper[4652]: I1205 05:31:53.916969 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txw9m" event={"ID":"712da180-0e95-46d1-ae94-66811f03cf96","Type":"ContainerDied","Data":"500a20f367c0275330b38a1b98118b2ad9c3fbf4cf5bfb50a8f9d96033100130"} Dec 05 05:31:54 crc kubenswrapper[4652]: I1205 05:31:54.924086 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skn8m" 
event={"ID":"f27dfcad-0744-4e4b-afd8-602951a7e2a7","Type":"ContainerStarted","Data":"0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a"} Dec 05 05:31:54 crc kubenswrapper[4652]: I1205 05:31:54.926399 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-txw9m" event={"ID":"712da180-0e95-46d1-ae94-66811f03cf96","Type":"ContainerStarted","Data":"5698eea3b030a48d9259459337b6c8cc07214b853f9c81905b31c584e61b0f65"} Dec 05 05:31:54 crc kubenswrapper[4652]: I1205 05:31:54.932916 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:31:54 crc kubenswrapper[4652]: I1205 05:31:54.941796 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-skn8m" podStartSLOduration=2.464827709 podStartE2EDuration="3.941781155s" podCreationTimestamp="2025-12-05 05:31:51 +0000 UTC" firstStartedPulling="2025-12-05 05:31:52.905513476 +0000 UTC m=+315.142243744" lastFinishedPulling="2025-12-05 05:31:54.382466923 +0000 UTC m=+316.619197190" observedRunningTime="2025-12-05 05:31:54.940855283 +0000 UTC m=+317.177585551" watchObservedRunningTime="2025-12-05 05:31:54.941781155 +0000 UTC m=+317.178511422" Dec 05 05:31:54 crc kubenswrapper[4652]: I1205 05:31:54.971138 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-txw9m" podStartSLOduration=2.478671454 podStartE2EDuration="3.971124403s" podCreationTimestamp="2025-12-05 05:31:51 +0000 UTC" firstStartedPulling="2025-12-05 05:31:52.907297251 +0000 UTC m=+315.144027518" lastFinishedPulling="2025-12-05 05:31:54.3997502 +0000 UTC m=+316.636480467" observedRunningTime="2025-12-05 05:31:54.969582754 +0000 UTC m=+317.206313021" watchObservedRunningTime="2025-12-05 05:31:54.971124403 +0000 UTC m=+317.207854671" Dec 05 05:31:59 crc kubenswrapper[4652]: I1205 05:31:59.300348 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:59 crc kubenswrapper[4652]: I1205 05:31:59.301703 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:59 crc kubenswrapper[4652]: I1205 05:31:59.338282 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:59 crc kubenswrapper[4652]: I1205 05:31:59.488713 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:59 crc kubenswrapper[4652]: I1205 05:31:59.488780 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:59 crc kubenswrapper[4652]: I1205 05:31:59.521234 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:31:59 crc kubenswrapper[4652]: I1205 05:31:59.986763 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-84w6t" Dec 05 05:31:59 crc kubenswrapper[4652]: I1205 05:31:59.990872 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xz9mv" Dec 05 05:32:01 crc kubenswrapper[4652]: I1205 05:32:01.690017 4652 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:32:01 crc kubenswrapper[4652]: I1205 05:32:01.690422 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:32:01 crc kubenswrapper[4652]: I1205 05:32:01.729212 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:32:01 crc kubenswrapper[4652]: I1205 05:32:01.880870 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:32:01 crc kubenswrapper[4652]: I1205 05:32:01.880947 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:32:01 crc kubenswrapper[4652]: I1205 05:32:01.912744 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:32:01 crc kubenswrapper[4652]: I1205 05:32:01.998697 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-txw9m" Dec 05 05:32:01 crc kubenswrapper[4652]: I1205 05:32:01.999184 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-skn8m" Dec 05 05:32:04 crc kubenswrapper[4652]: I1205 05:32:04.150178 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:32:04 crc kubenswrapper[4652]: I1205 05:32:04.151306 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.526191 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg"] Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.526457 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" podUID="6b251475-c037-4453-8f24-406781fabc44" containerName="route-controller-manager" containerID="cri-o://bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b" gracePeriod=30 Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.528875 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5mktp"] Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.529068 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" podUID="31d139be-8c3b-4ff9-9e9d-872906e7a547" containerName="controller-manager" containerID="cri-o://632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202" gracePeriod=30 Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.853021 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.856782 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875352 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cdvx\" (UniqueName: \"kubernetes.io/projected/31d139be-8c3b-4ff9-9e9d-872906e7a547-kube-api-access-6cdvx\") pod \"31d139be-8c3b-4ff9-9e9d-872906e7a547\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875424 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gfb8\" (UniqueName: \"kubernetes.io/projected/6b251475-c037-4453-8f24-406781fabc44-kube-api-access-8gfb8\") pod \"6b251475-c037-4453-8f24-406781fabc44\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875451 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b251475-c037-4453-8f24-406781fabc44-serving-cert\") pod \"6b251475-c037-4453-8f24-406781fabc44\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875498 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-config\") pod \"31d139be-8c3b-4ff9-9e9d-872906e7a547\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875533 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31d139be-8c3b-4ff9-9e9d-872906e7a547-serving-cert\") pod \"31d139be-8c3b-4ff9-9e9d-872906e7a547\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875551 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-client-ca\") pod \"6b251475-c037-4453-8f24-406781fabc44\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875615 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-config\") pod \"6b251475-c037-4453-8f24-406781fabc44\" (UID: \"6b251475-c037-4453-8f24-406781fabc44\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875632 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-proxy-ca-bundles\") pod \"31d139be-8c3b-4ff9-9e9d-872906e7a547\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.875688 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-client-ca\") pod \"31d139be-8c3b-4ff9-9e9d-872906e7a547\" (UID: \"31d139be-8c3b-4ff9-9e9d-872906e7a547\") " Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.876405 4652 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-client-ca" (OuterVolumeSpecName: "client-ca") pod "31d139be-8c3b-4ff9-9e9d-872906e7a547" (UID: "31d139be-8c3b-4ff9-9e9d-872906e7a547"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.876733 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-client-ca" (OuterVolumeSpecName: "client-ca") pod "6b251475-c037-4453-8f24-406781fabc44" (UID: "6b251475-c037-4453-8f24-406781fabc44"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.877043 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-config" (OuterVolumeSpecName: "config") pod "6b251475-c037-4453-8f24-406781fabc44" (UID: "6b251475-c037-4453-8f24-406781fabc44"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.877290 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "31d139be-8c3b-4ff9-9e9d-872906e7a547" (UID: "31d139be-8c3b-4ff9-9e9d-872906e7a547"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.878026 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-config" (OuterVolumeSpecName: "config") pod "31d139be-8c3b-4ff9-9e9d-872906e7a547" (UID: "31d139be-8c3b-4ff9-9e9d-872906e7a547"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.880956 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b251475-c037-4453-8f24-406781fabc44-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6b251475-c037-4453-8f24-406781fabc44" (UID: "6b251475-c037-4453-8f24-406781fabc44"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.882797 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d139be-8c3b-4ff9-9e9d-872906e7a547-kube-api-access-6cdvx" (OuterVolumeSpecName: "kube-api-access-6cdvx") pod "31d139be-8c3b-4ff9-9e9d-872906e7a547" (UID: "31d139be-8c3b-4ff9-9e9d-872906e7a547"). InnerVolumeSpecName "kube-api-access-6cdvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.883800 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d139be-8c3b-4ff9-9e9d-872906e7a547-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "31d139be-8c3b-4ff9-9e9d-872906e7a547" (UID: "31d139be-8c3b-4ff9-9e9d-872906e7a547"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.883880 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b251475-c037-4453-8f24-406781fabc44-kube-api-access-8gfb8" (OuterVolumeSpecName: "kube-api-access-8gfb8") pod "6b251475-c037-4453-8f24-406781fabc44" (UID: "6b251475-c037-4453-8f24-406781fabc44"). InnerVolumeSpecName "kube-api-access-8gfb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977228 4652 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977276 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cdvx\" (UniqueName: \"kubernetes.io/projected/31d139be-8c3b-4ff9-9e9d-872906e7a547-kube-api-access-6cdvx\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977290 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gfb8\" (UniqueName: \"kubernetes.io/projected/6b251475-c037-4453-8f24-406781fabc44-kube-api-access-8gfb8\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977302 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b251475-c037-4453-8f24-406781fabc44-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977311 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977320 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31d139be-8c3b-4ff9-9e9d-872906e7a547-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977339 4652 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977348 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b251475-c037-4453-8f24-406781fabc44-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.977357 4652 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/31d139be-8c3b-4ff9-9e9d-872906e7a547-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.984419 4652 generic.go:334] "Generic (PLEG): container finished" podID="6b251475-c037-4453-8f24-406781fabc44" containerID="bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b" exitCode=0 Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.984491 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" event={"ID":"6b251475-c037-4453-8f24-406781fabc44","Type":"ContainerDied","Data":"bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b"} Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 
05:32:05.984527 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" event={"ID":"6b251475-c037-4453-8f24-406781fabc44","Type":"ContainerDied","Data":"f8b1e25fe22ec7df8dc736cb6180ea92c77ace675b721cbb4355198375681410"} Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.984549 4652 scope.go:117] "RemoveContainer" containerID="bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.984705 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.986223 4652 generic.go:334] "Generic (PLEG): container finished" podID="31d139be-8c3b-4ff9-9e9d-872906e7a547" containerID="632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202" exitCode=0 Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.986363 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" event={"ID":"31d139be-8c3b-4ff9-9e9d-872906e7a547","Type":"ContainerDied","Data":"632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202"} Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.986526 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" event={"ID":"31d139be-8c3b-4ff9-9e9d-872906e7a547","Type":"ContainerDied","Data":"d0c0e26e4f2711ac619b48134b7688792183ea6df1bc871bba3cc9562b8da274"} Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.986386 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5mktp" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.999008 4652 scope.go:117] "RemoveContainer" containerID="bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b" Dec 05 05:32:05 crc kubenswrapper[4652]: E1205 05:32:05.999335 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b\": container with ID starting with bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b not found: ID does not exist" containerID="bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.999371 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b"} err="failed to get container status \"bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b\": rpc error: code = NotFound desc = could not find container \"bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b\": container with ID starting with bc5288477eda2f5d822e9df097c0f4332a7068357d7ec299ee6afe953997021b not found: ID does not exist" Dec 05 05:32:05 crc kubenswrapper[4652]: I1205 05:32:05.999396 4652 scope.go:117] "RemoveContainer" containerID="632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202" Dec 05 05:32:06 crc kubenswrapper[4652]: I1205 05:32:06.012749 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg"] Dec 05 05:32:06 crc kubenswrapper[4652]: I1205 05:32:06.015127 4652 scope.go:117] 
"RemoveContainer" containerID="632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202" Dec 05 05:32:06 crc kubenswrapper[4652]: E1205 05:32:06.015512 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202\": container with ID starting with 632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202 not found: ID does not exist" containerID="632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202" Dec 05 05:32:06 crc kubenswrapper[4652]: I1205 05:32:06.015546 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202"} err="failed to get container status \"632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202\": rpc error: code = NotFound desc = could not find container \"632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202\": container with ID starting with 632344ad8111f0a02b1a9f7f5e1e88ee5c945657452482f98509539ae2eb0202 not found: ID does not exist" Dec 05 05:32:06 crc kubenswrapper[4652]: I1205 05:32:06.016361 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9lzgg"] Dec 05 05:32:06 crc kubenswrapper[4652]: I1205 05:32:06.018776 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5mktp"] Dec 05 05:32:06 crc kubenswrapper[4652]: I1205 05:32:06.021175 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5mktp"] Dec 05 05:32:06 crc kubenswrapper[4652]: I1205 05:32:06.131788 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d139be-8c3b-4ff9-9e9d-872906e7a547" path="/var/lib/kubelet/pods/31d139be-8c3b-4ff9-9e9d-872906e7a547/volumes" Dec 05 05:32:06 crc kubenswrapper[4652]: I1205 05:32:06.132414 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b251475-c037-4453-8f24-406781fabc44" path="/var/lib/kubelet/pods/6b251475-c037-4453-8f24-406781fabc44/volumes" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.302207 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-754576dcc6-prfpm"] Dec 05 05:32:07 crc kubenswrapper[4652]: E1205 05:32:07.302854 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b251475-c037-4453-8f24-406781fabc44" containerName="route-controller-manager" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.302872 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b251475-c037-4453-8f24-406781fabc44" containerName="route-controller-manager" Dec 05 05:32:07 crc kubenswrapper[4652]: E1205 05:32:07.302894 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31d139be-8c3b-4ff9-9e9d-872906e7a547" containerName="controller-manager" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.302904 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="31d139be-8c3b-4ff9-9e9d-872906e7a547" containerName="controller-manager" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.303014 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b251475-c037-4453-8f24-406781fabc44" containerName="route-controller-manager" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.303028 4652 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="31d139be-8c3b-4ff9-9e9d-872906e7a547" containerName="controller-manager" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.303591 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.306430 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.306691 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.306723 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.306769 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.307346 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.307351 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.310394 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq"] Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.311161 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.313370 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.316128 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.319524 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.319713 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.319720 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.320971 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.324246 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.332802 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-754576dcc6-prfpm"] Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.337736 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq"] Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.392375 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-proxy-ca-bundles\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.392587 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-client-ca\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.392716 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjmc2\" (UniqueName: \"kubernetes.io/projected/aec62855-7d41-4dca-8532-106842c6f322-kube-api-access-fjmc2\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.392916 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/502cd3a6-8fd9-4592-9b54-3f716bd00de5-serving-cert\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.393004 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-client-ca\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.393111 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5lpp\" (UniqueName: \"kubernetes.io/projected/502cd3a6-8fd9-4592-9b54-3f716bd00de5-kube-api-access-p5lpp\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.393188 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-config\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.393323 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aec62855-7d41-4dca-8532-106842c6f322-serving-cert\") pod \"controller-manager-754576dcc6-prfpm\" (UID: 
\"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.393380 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-config\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.493734 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-client-ca\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.493857 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjmc2\" (UniqueName: \"kubernetes.io/projected/aec62855-7d41-4dca-8532-106842c6f322-kube-api-access-fjmc2\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.493946 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/502cd3a6-8fd9-4592-9b54-3f716bd00de5-serving-cert\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.494078 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-client-ca\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.494173 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5lpp\" (UniqueName: \"kubernetes.io/projected/502cd3a6-8fd9-4592-9b54-3f716bd00de5-kube-api-access-p5lpp\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.494266 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-config\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.494355 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aec62855-7d41-4dca-8532-106842c6f322-serving-cert\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 
crc kubenswrapper[4652]: I1205 05:32:07.494453 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-config\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.494590 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-proxy-ca-bundles\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.495013 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-client-ca\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.495872 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-client-ca\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.496444 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-config\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.496840 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-proxy-ca-bundles\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.497508 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-config\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.499920 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aec62855-7d41-4dca-8532-106842c6f322-serving-cert\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.500363 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/502cd3a6-8fd9-4592-9b54-3f716bd00de5-serving-cert\") pod 
\"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.508036 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5lpp\" (UniqueName: \"kubernetes.io/projected/502cd3a6-8fd9-4592-9b54-3f716bd00de5-kube-api-access-p5lpp\") pod \"route-controller-manager-d6ddf6c78-xqfjq\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.509530 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjmc2\" (UniqueName: \"kubernetes.io/projected/aec62855-7d41-4dca-8532-106842c6f322-kube-api-access-fjmc2\") pod \"controller-manager-754576dcc6-prfpm\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.629216 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.639087 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:07 crc kubenswrapper[4652]: I1205 05:32:07.993494 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-754576dcc6-prfpm"] Dec 05 05:32:07 crc kubenswrapper[4652]: W1205 05:32:07.998136 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaec62855_7d41_4dca_8532_106842c6f322.slice/crio-58ffa1b16edf0f7f6adba2a6c851d9a37b846477d7e5a1ac7642d14ae627e5e6 WatchSource:0}: Error finding container 58ffa1b16edf0f7f6adba2a6c851d9a37b846477d7e5a1ac7642d14ae627e5e6: Status 404 returned error can't find the container with id 58ffa1b16edf0f7f6adba2a6c851d9a37b846477d7e5a1ac7642d14ae627e5e6 Dec 05 05:32:08 crc kubenswrapper[4652]: I1205 05:32:08.021531 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq"] Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.006198 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" event={"ID":"502cd3a6-8fd9-4592-9b54-3f716bd00de5","Type":"ContainerStarted","Data":"149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5"} Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.006691 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" event={"ID":"502cd3a6-8fd9-4592-9b54-3f716bd00de5","Type":"ContainerStarted","Data":"d214e86b8be017f8070620708db4842212250ce1f8ac37602727019a865581c4"} Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.006748 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.008793 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" 
event={"ID":"aec62855-7d41-4dca-8532-106842c6f322","Type":"ContainerStarted","Data":"3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b"} Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.008844 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" event={"ID":"aec62855-7d41-4dca-8532-106842c6f322","Type":"ContainerStarted","Data":"58ffa1b16edf0f7f6adba2a6c851d9a37b846477d7e5a1ac7642d14ae627e5e6"} Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.009008 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.013958 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.014151 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.028025 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" podStartSLOduration=4.028006296 podStartE2EDuration="4.028006296s" podCreationTimestamp="2025-12-05 05:32:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:32:09.023030949 +0000 UTC m=+331.259761216" watchObservedRunningTime="2025-12-05 05:32:09.028006296 +0000 UTC m=+331.264736563" Dec 05 05:32:09 crc kubenswrapper[4652]: I1205 05:32:09.049114 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" podStartSLOduration=4.049095117 podStartE2EDuration="4.049095117s" podCreationTimestamp="2025-12-05 05:32:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:32:09.048386734 +0000 UTC m=+331.285117001" watchObservedRunningTime="2025-12-05 05:32:09.049095117 +0000 UTC m=+331.285825384" Dec 05 05:32:34 crc kubenswrapper[4652]: I1205 05:32:34.150715 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:32:34 crc kubenswrapper[4652]: I1205 05:32:34.152883 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.328872 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-v62p2"] Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.330071 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.342671 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-v62p2"] Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.437908 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-registry-certificates\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.438305 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wx5n4\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-kube-api-access-wx5n4\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.438357 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-registry-tls\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.438375 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-trusted-ca\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.438400 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-bound-sa-token\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.438421 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.438483 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.438533 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.456717 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.540962 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-registry-certificates\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.541116 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wx5n4\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-kube-api-access-wx5n4\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.541205 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-trusted-ca\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.541280 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-registry-tls\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.541346 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-bound-sa-token\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.541428 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.541670 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.542297 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-registry-certificates\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.543248 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-trusted-ca\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.543548 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.548635 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.548740 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-registry-tls\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.563679 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wx5n4\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-kube-api-access-wx5n4\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.566359 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/047b6f1a-2ba4-4428-9bf5-fa9366890ad6-bound-sa-token\") pod \"image-registry-66df7c8f76-v62p2\" (UID: \"047b6f1a-2ba4-4428-9bf5-fa9366890ad6\") " pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:42 crc kubenswrapper[4652]: I1205 05:32:42.648518 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:43 crc kubenswrapper[4652]: I1205 05:32:43.007453 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-v62p2"] Dec 05 05:32:43 crc kubenswrapper[4652]: I1205 05:32:43.177175 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" event={"ID":"047b6f1a-2ba4-4428-9bf5-fa9366890ad6","Type":"ContainerStarted","Data":"8fd5d28f003b6d9e707a013a7ea66b7286fdbdd783808e79ddff767124ae1856"} Dec 05 05:32:43 crc kubenswrapper[4652]: I1205 05:32:43.177231 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" event={"ID":"047b6f1a-2ba4-4428-9bf5-fa9366890ad6","Type":"ContainerStarted","Data":"d16d915ad66418890a66c2317196a3e166a95ad79ba3b142ad008af8c97f3980"} Dec 05 05:32:43 crc kubenswrapper[4652]: I1205 05:32:43.178132 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:32:43 crc kubenswrapper[4652]: I1205 05:32:43.199496 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" podStartSLOduration=1.199476172 podStartE2EDuration="1.199476172s" podCreationTimestamp="2025-12-05 05:32:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:32:43.198767921 +0000 UTC m=+365.435498188" watchObservedRunningTime="2025-12-05 05:32:43.199476172 +0000 UTC m=+365.436206439" Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.509253 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq"] Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.509786 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" podUID="502cd3a6-8fd9-4592-9b54-3f716bd00de5" containerName="route-controller-manager" containerID="cri-o://149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5" gracePeriod=30 Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.877737 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.989160 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-client-ca\") pod \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.989220 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/502cd3a6-8fd9-4592-9b54-3f716bd00de5-serving-cert\") pod \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.989256 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-config\") pod \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.989305 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5lpp\" (UniqueName: \"kubernetes.io/projected/502cd3a6-8fd9-4592-9b54-3f716bd00de5-kube-api-access-p5lpp\") pod \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\" (UID: \"502cd3a6-8fd9-4592-9b54-3f716bd00de5\") " Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.990427 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-client-ca" (OuterVolumeSpecName: "client-ca") pod "502cd3a6-8fd9-4592-9b54-3f716bd00de5" (UID: "502cd3a6-8fd9-4592-9b54-3f716bd00de5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.990503 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-config" (OuterVolumeSpecName: "config") pod "502cd3a6-8fd9-4592-9b54-3f716bd00de5" (UID: "502cd3a6-8fd9-4592-9b54-3f716bd00de5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.995233 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/502cd3a6-8fd9-4592-9b54-3f716bd00de5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "502cd3a6-8fd9-4592-9b54-3f716bd00de5" (UID: "502cd3a6-8fd9-4592-9b54-3f716bd00de5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:32:45 crc kubenswrapper[4652]: I1205 05:32:45.995835 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/502cd3a6-8fd9-4592-9b54-3f716bd00de5-kube-api-access-p5lpp" (OuterVolumeSpecName: "kube-api-access-p5lpp") pod "502cd3a6-8fd9-4592-9b54-3f716bd00de5" (UID: "502cd3a6-8fd9-4592-9b54-3f716bd00de5"). InnerVolumeSpecName "kube-api-access-p5lpp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.090874 4652 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.090903 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/502cd3a6-8fd9-4592-9b54-3f716bd00de5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.090913 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/502cd3a6-8fd9-4592-9b54-3f716bd00de5-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.090923 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5lpp\" (UniqueName: \"kubernetes.io/projected/502cd3a6-8fd9-4592-9b54-3f716bd00de5-kube-api-access-p5lpp\") on node \"crc\" DevicePath \"\"" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.192189 4652 generic.go:334] "Generic (PLEG): container finished" podID="502cd3a6-8fd9-4592-9b54-3f716bd00de5" containerID="149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5" exitCode=0 Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.192251 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.192259 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" event={"ID":"502cd3a6-8fd9-4592-9b54-3f716bd00de5","Type":"ContainerDied","Data":"149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5"} Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.192316 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq" event={"ID":"502cd3a6-8fd9-4592-9b54-3f716bd00de5","Type":"ContainerDied","Data":"d214e86b8be017f8070620708db4842212250ce1f8ac37602727019a865581c4"} Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.192340 4652 scope.go:117] "RemoveContainer" containerID="149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.213622 4652 scope.go:117] "RemoveContainer" containerID="149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5" Dec 05 05:32:46 crc kubenswrapper[4652]: E1205 05:32:46.214389 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5\": container with ID starting with 149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5 not found: ID does not exist" containerID="149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.214456 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5"} err="failed to get container status \"149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5\": rpc error: code = NotFound desc = could not find container 
\"149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5\": container with ID starting with 149fbb4d5c6d9cf69a9200db8fc2e799d416947e2d658d72b2a7bef9362018c5 not found: ID does not exist" Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.217812 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq"] Dec 05 05:32:46 crc kubenswrapper[4652]: I1205 05:32:46.220478 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d6ddf6c78-xqfjq"] Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.334411 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8"] Dec 05 05:32:47 crc kubenswrapper[4652]: E1205 05:32:47.334962 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="502cd3a6-8fd9-4592-9b54-3f716bd00de5" containerName="route-controller-manager" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.334977 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="502cd3a6-8fd9-4592-9b54-3f716bd00de5" containerName="route-controller-manager" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.335079 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="502cd3a6-8fd9-4592-9b54-3f716bd00de5" containerName="route-controller-manager" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.335480 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.337859 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.338415 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.338427 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.338647 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.338751 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.340654 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.344744 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8"] Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.523823 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f1a273c-76e3-4264-b4dd-1e35fca78a55-config\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.523893 4652 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8qbv\" (UniqueName: \"kubernetes.io/projected/8f1a273c-76e3-4264-b4dd-1e35fca78a55-kube-api-access-h8qbv\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.524023 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f1a273c-76e3-4264-b4dd-1e35fca78a55-client-ca\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.524049 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f1a273c-76e3-4264-b4dd-1e35fca78a55-serving-cert\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.625536 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f1a273c-76e3-4264-b4dd-1e35fca78a55-config\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.625632 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8qbv\" (UniqueName: \"kubernetes.io/projected/8f1a273c-76e3-4264-b4dd-1e35fca78a55-kube-api-access-h8qbv\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.625677 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f1a273c-76e3-4264-b4dd-1e35fca78a55-client-ca\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.625702 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f1a273c-76e3-4264-b4dd-1e35fca78a55-serving-cert\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.626961 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f1a273c-76e3-4264-b4dd-1e35fca78a55-client-ca\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.627071 4652 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f1a273c-76e3-4264-b4dd-1e35fca78a55-config\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.633948 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f1a273c-76e3-4264-b4dd-1e35fca78a55-serving-cert\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.640198 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8qbv\" (UniqueName: \"kubernetes.io/projected/8f1a273c-76e3-4264-b4dd-1e35fca78a55-kube-api-access-h8qbv\") pod \"route-controller-manager-d8c4f49fc-c9zd8\" (UID: \"8f1a273c-76e3-4264-b4dd-1e35fca78a55\") " pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:47 crc kubenswrapper[4652]: I1205 05:32:47.648175 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:48 crc kubenswrapper[4652]: I1205 05:32:48.041657 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8"] Dec 05 05:32:48 crc kubenswrapper[4652]: I1205 05:32:48.131690 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="502cd3a6-8fd9-4592-9b54-3f716bd00de5" path="/var/lib/kubelet/pods/502cd3a6-8fd9-4592-9b54-3f716bd00de5/volumes" Dec 05 05:32:48 crc kubenswrapper[4652]: I1205 05:32:48.205193 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" event={"ID":"8f1a273c-76e3-4264-b4dd-1e35fca78a55","Type":"ContainerStarted","Data":"c7046d93dd688bce792c46cda8542e9350c26f7d37d3a0f93abd5b14895a43e2"} Dec 05 05:32:48 crc kubenswrapper[4652]: I1205 05:32:48.205234 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" event={"ID":"8f1a273c-76e3-4264-b4dd-1e35fca78a55","Type":"ContainerStarted","Data":"817747e25f55ace3da70316bbb8ddf39e86b8f694cf5eefac4e529f77b0a667e"} Dec 05 05:32:48 crc kubenswrapper[4652]: I1205 05:32:48.206397 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:32:48 crc kubenswrapper[4652]: I1205 05:32:48.208053 4652 patch_prober.go:28] interesting pod/route-controller-manager-d8c4f49fc-c9zd8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" start-of-body= Dec 05 05:32:48 crc kubenswrapper[4652]: I1205 05:32:48.208525 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" podUID="8f1a273c-76e3-4264-b4dd-1e35fca78a55" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" Dec 05 
05:32:48 crc kubenswrapper[4652]: I1205 05:32:48.218961 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" podStartSLOduration=3.218940673 podStartE2EDuration="3.218940673s" podCreationTimestamp="2025-12-05 05:32:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:32:48.217937476 +0000 UTC m=+370.454667743" watchObservedRunningTime="2025-12-05 05:32:48.218940673 +0000 UTC m=+370.455670940" Dec 05 05:32:49 crc kubenswrapper[4652]: I1205 05:32:49.223975 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-d8c4f49fc-c9zd8" Dec 05 05:33:02 crc kubenswrapper[4652]: I1205 05:33:02.652456 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-v62p2" Dec 05 05:33:02 crc kubenswrapper[4652]: I1205 05:33:02.687837 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n2n75"] Dec 05 05:33:04 crc kubenswrapper[4652]: I1205 05:33:04.150537 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:33:04 crc kubenswrapper[4652]: I1205 05:33:04.150904 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:33:04 crc kubenswrapper[4652]: I1205 05:33:04.150944 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:33:04 crc kubenswrapper[4652]: I1205 05:33:04.151296 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4e90520058f578e3fe086566cbaa2b220e39ddf1f77b6c161c8e55a7db53ca5a"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 05:33:04 crc kubenswrapper[4652]: I1205 05:33:04.151355 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://4e90520058f578e3fe086566cbaa2b220e39ddf1f77b6c161c8e55a7db53ca5a" gracePeriod=600 Dec 05 05:33:04 crc kubenswrapper[4652]: I1205 05:33:04.284003 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="4e90520058f578e3fe086566cbaa2b220e39ddf1f77b6c161c8e55a7db53ca5a" exitCode=0 Dec 05 05:33:04 crc kubenswrapper[4652]: I1205 05:33:04.284049 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" 
event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"4e90520058f578e3fe086566cbaa2b220e39ddf1f77b6c161c8e55a7db53ca5a"} Dec 05 05:33:04 crc kubenswrapper[4652]: I1205 05:33:04.284088 4652 scope.go:117] "RemoveContainer" containerID="0414316d7c009f29730d5c763df99b60925ee0c79cb9c670f355c46e997002aa" Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.292091 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"7e133200e3f39bb8a8ffb76a76a2e5bfeff1444170459551c8b4e9c460f0294f"} Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.497987 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-754576dcc6-prfpm"] Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.498515 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" podUID="aec62855-7d41-4dca-8532-106842c6f322" containerName="controller-manager" containerID="cri-o://3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b" gracePeriod=30 Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.807441 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.952773 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjmc2\" (UniqueName: \"kubernetes.io/projected/aec62855-7d41-4dca-8532-106842c6f322-kube-api-access-fjmc2\") pod \"aec62855-7d41-4dca-8532-106842c6f322\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.952867 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-config\") pod \"aec62855-7d41-4dca-8532-106842c6f322\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.952902 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-client-ca\") pod \"aec62855-7d41-4dca-8532-106842c6f322\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.952922 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-proxy-ca-bundles\") pod \"aec62855-7d41-4dca-8532-106842c6f322\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.952965 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aec62855-7d41-4dca-8532-106842c6f322-serving-cert\") pod \"aec62855-7d41-4dca-8532-106842c6f322\" (UID: \"aec62855-7d41-4dca-8532-106842c6f322\") " Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.953823 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "aec62855-7d41-4dca-8532-106842c6f322" (UID: 
"aec62855-7d41-4dca-8532-106842c6f322"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.953835 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-client-ca" (OuterVolumeSpecName: "client-ca") pod "aec62855-7d41-4dca-8532-106842c6f322" (UID: "aec62855-7d41-4dca-8532-106842c6f322"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.953850 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-config" (OuterVolumeSpecName: "config") pod "aec62855-7d41-4dca-8532-106842c6f322" (UID: "aec62855-7d41-4dca-8532-106842c6f322"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.959043 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aec62855-7d41-4dca-8532-106842c6f322-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "aec62855-7d41-4dca-8532-106842c6f322" (UID: "aec62855-7d41-4dca-8532-106842c6f322"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:33:05 crc kubenswrapper[4652]: I1205 05:33:05.959045 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aec62855-7d41-4dca-8532-106842c6f322-kube-api-access-fjmc2" (OuterVolumeSpecName: "kube-api-access-fjmc2") pod "aec62855-7d41-4dca-8532-106842c6f322" (UID: "aec62855-7d41-4dca-8532-106842c6f322"). InnerVolumeSpecName "kube-api-access-fjmc2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.054692 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.054795 4652 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.054847 4652 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/aec62855-7d41-4dca-8532-106842c6f322-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.054911 4652 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aec62855-7d41-4dca-8532-106842c6f322-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.054970 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjmc2\" (UniqueName: \"kubernetes.io/projected/aec62855-7d41-4dca-8532-106842c6f322-kube-api-access-fjmc2\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.297182 4652 generic.go:334] "Generic (PLEG): container finished" podID="aec62855-7d41-4dca-8532-106842c6f322" containerID="3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b" exitCode=0 Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.297241 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" event={"ID":"aec62855-7d41-4dca-8532-106842c6f322","Type":"ContainerDied","Data":"3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b"} Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.297284 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" event={"ID":"aec62855-7d41-4dca-8532-106842c6f322","Type":"ContainerDied","Data":"58ffa1b16edf0f7f6adba2a6c851d9a37b846477d7e5a1ac7642d14ae627e5e6"} Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.297307 4652 scope.go:117] "RemoveContainer" containerID="3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.298233 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-754576dcc6-prfpm" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.313638 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-754576dcc6-prfpm"] Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.314431 4652 scope.go:117] "RemoveContainer" containerID="3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b" Dec 05 05:33:06 crc kubenswrapper[4652]: E1205 05:33:06.314767 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b\": container with ID starting with 3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b not found: ID does not exist" containerID="3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.314801 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b"} err="failed to get container status \"3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b\": rpc error: code = NotFound desc = could not find container \"3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b\": container with ID starting with 3fe0ba0b34bf7852305c31997a4321f241e7d42692b92aaf8385aa02c312a71b not found: ID does not exist" Dec 05 05:33:06 crc kubenswrapper[4652]: I1205 05:33:06.316056 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-754576dcc6-prfpm"] Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.350584 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm"] Dec 05 05:33:07 crc kubenswrapper[4652]: E1205 05:33:07.351285 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aec62855-7d41-4dca-8532-106842c6f322" containerName="controller-manager" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.351303 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="aec62855-7d41-4dca-8532-106842c6f322" containerName="controller-manager" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.351446 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="aec62855-7d41-4dca-8532-106842c6f322" containerName="controller-manager" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.352041 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.354441 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.355082 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.355273 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.355269 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.355654 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.356352 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.361252 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.361916 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm"] Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.473129 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-proxy-ca-bundles\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.473464 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm9d2\" (UniqueName: \"kubernetes.io/projected/969f6e34-1d1d-427e-bdc9-de153261478b-kube-api-access-cm9d2\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.473720 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/969f6e34-1d1d-427e-bdc9-de153261478b-serving-cert\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.473904 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-config\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.473964 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-client-ca\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.575850 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm9d2\" (UniqueName: \"kubernetes.io/projected/969f6e34-1d1d-427e-bdc9-de153261478b-kube-api-access-cm9d2\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.575930 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/969f6e34-1d1d-427e-bdc9-de153261478b-serving-cert\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.575980 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-config\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.576027 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-client-ca\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.576057 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-proxy-ca-bundles\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.577397 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-client-ca\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.577413 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-proxy-ca-bundles\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.578068 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/969f6e34-1d1d-427e-bdc9-de153261478b-config\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " 
pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.581995 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/969f6e34-1d1d-427e-bdc9-de153261478b-serving-cert\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.591207 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm9d2\" (UniqueName: \"kubernetes.io/projected/969f6e34-1d1d-427e-bdc9-de153261478b-kube-api-access-cm9d2\") pod \"controller-manager-5c6b6d7c88-t7rwm\" (UID: \"969f6e34-1d1d-427e-bdc9-de153261478b\") " pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:07 crc kubenswrapper[4652]: I1205 05:33:07.669801 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:08 crc kubenswrapper[4652]: I1205 05:33:08.025794 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm"] Dec 05 05:33:08 crc kubenswrapper[4652]: I1205 05:33:08.132349 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aec62855-7d41-4dca-8532-106842c6f322" path="/var/lib/kubelet/pods/aec62855-7d41-4dca-8532-106842c6f322/volumes" Dec 05 05:33:08 crc kubenswrapper[4652]: I1205 05:33:08.307935 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" event={"ID":"969f6e34-1d1d-427e-bdc9-de153261478b","Type":"ContainerStarted","Data":"9035cff80a39b3c44d80670eb1786261660833dbc82f53b1fce1a3dd02323a4f"} Dec 05 05:33:08 crc kubenswrapper[4652]: I1205 05:33:08.308303 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:08 crc kubenswrapper[4652]: I1205 05:33:08.308317 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" event={"ID":"969f6e34-1d1d-427e-bdc9-de153261478b","Type":"ContainerStarted","Data":"767aab21283eac4643e9345f38392c1b4da5546fbf13d6168b69ff85b849c59c"} Dec 05 05:33:08 crc kubenswrapper[4652]: I1205 05:33:08.318269 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" Dec 05 05:33:08 crc kubenswrapper[4652]: I1205 05:33:08.322488 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5c6b6d7c88-t7rwm" podStartSLOduration=3.322469688 podStartE2EDuration="3.322469688s" podCreationTimestamp="2025-12-05 05:33:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:33:08.321330726 +0000 UTC m=+390.558060993" watchObservedRunningTime="2025-12-05 05:33:08.322469688 +0000 UTC m=+390.559199955" Dec 05 05:33:27 crc kubenswrapper[4652]: I1205 05:33:27.718225 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" podUID="b85664fc-f6d6-49a2-b9a9-22c6b523d5de" containerName="registry" 
containerID="cri-o://471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b" gracePeriod=30 Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.102205 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.235996 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.236060 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-installation-pull-secrets\") pod \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.236085 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnn4j\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-kube-api-access-mnn4j\") pod \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.236133 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-ca-trust-extracted\") pod \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.236165 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-tls\") pod \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.236199 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-bound-sa-token\") pod \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.236274 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-certificates\") pod \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.236329 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-trusted-ca\") pod \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\" (UID: \"b85664fc-f6d6-49a2-b9a9-22c6b523d5de\") " Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.237465 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b85664fc-f6d6-49a2-b9a9-22c6b523d5de" (UID: 
"b85664fc-f6d6-49a2-b9a9-22c6b523d5de"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.238259 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b85664fc-f6d6-49a2-b9a9-22c6b523d5de" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.242766 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b85664fc-f6d6-49a2-b9a9-22c6b523d5de" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.242959 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b85664fc-f6d6-49a2-b9a9-22c6b523d5de" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.242990 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b85664fc-f6d6-49a2-b9a9-22c6b523d5de" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.243195 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-kube-api-access-mnn4j" (OuterVolumeSpecName: "kube-api-access-mnn4j") pod "b85664fc-f6d6-49a2-b9a9-22c6b523d5de" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de"). InnerVolumeSpecName "kube-api-access-mnn4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.244997 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b85664fc-f6d6-49a2-b9a9-22c6b523d5de" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.250289 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b85664fc-f6d6-49a2-b9a9-22c6b523d5de" (UID: "b85664fc-f6d6-49a2-b9a9-22c6b523d5de"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.337465 4652 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.337492 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.337517 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnn4j\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-kube-api-access-mnn4j\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.337528 4652 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.337537 4652 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.337547 4652 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.337580 4652 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b85664fc-f6d6-49a2-b9a9-22c6b523d5de-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.405913 4652 generic.go:334] "Generic (PLEG): container finished" podID="b85664fc-f6d6-49a2-b9a9-22c6b523d5de" containerID="471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b" exitCode=0 Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.405957 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" event={"ID":"b85664fc-f6d6-49a2-b9a9-22c6b523d5de","Type":"ContainerDied","Data":"471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b"} Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.405979 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.405991 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n2n75" event={"ID":"b85664fc-f6d6-49a2-b9a9-22c6b523d5de","Type":"ContainerDied","Data":"c2f9970eb9760de7027bff1f440b450e80a003a45e2e57c57cc5dd3070c17eb7"} Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.406013 4652 scope.go:117] "RemoveContainer" containerID="471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.420423 4652 scope.go:117] "RemoveContainer" containerID="471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b" Dec 05 05:33:28 crc kubenswrapper[4652]: E1205 05:33:28.420813 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b\": container with ID starting with 471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b not found: ID does not exist" containerID="471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.420852 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b"} err="failed to get container status \"471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b\": rpc error: code = NotFound desc = could not find container \"471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b\": container with ID starting with 471228cc071d292cb746018747f1ddbd3818e44c2fa7ea73556115ffbb89099b not found: ID does not exist" Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.431967 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n2n75"] Dec 05 05:33:28 crc kubenswrapper[4652]: I1205 05:33:28.435923 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n2n75"] Dec 05 05:33:30 crc kubenswrapper[4652]: I1205 05:33:30.135106 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b85664fc-f6d6-49a2-b9a9-22c6b523d5de" path="/var/lib/kubelet/pods/b85664fc-f6d6-49a2-b9a9-22c6b523d5de/volumes" Dec 05 05:35:04 crc kubenswrapper[4652]: I1205 05:35:04.150266 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:35:04 crc kubenswrapper[4652]: I1205 05:35:04.150918 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:35:34 crc kubenswrapper[4652]: I1205 05:35:34.150680 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 
05 05:35:34 crc kubenswrapper[4652]: I1205 05:35:34.151297 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.536409 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dttvn"] Dec 05 05:35:57 crc kubenswrapper[4652]: E1205 05:35:57.537087 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b85664fc-f6d6-49a2-b9a9-22c6b523d5de" containerName="registry" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.537101 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b85664fc-f6d6-49a2-b9a9-22c6b523d5de" containerName="registry" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.537180 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b85664fc-f6d6-49a2-b9a9-22c6b523d5de" containerName="registry" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.537537 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-dttvn" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.538647 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zr9n\" (UniqueName: \"kubernetes.io/projected/2b9b3dfa-9a5c-4267-9198-ebd356b60ced-kube-api-access-6zr9n\") pod \"cert-manager-cainjector-7f985d654d-dttvn\" (UID: \"2b9b3dfa-9a5c-4267-9198-ebd356b60ced\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dttvn" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.539327 4652 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-pmx78" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.539632 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.539734 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.545822 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-bvrgw"] Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.547236 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-bvrgw" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.554648 4652 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-flr8c" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.555389 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-g6642"] Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.556093 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.557864 4652 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-6dqj4" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.561344 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-bvrgw"] Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.564222 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-g6642"] Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.576384 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dttvn"] Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.640069 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtjr8\" (UniqueName: \"kubernetes.io/projected/da9536a9-4fcd-43a8-ad83-88903d8a043e-kube-api-access-xtjr8\") pod \"cert-manager-webhook-5655c58dd6-g6642\" (UID: \"da9536a9-4fcd-43a8-ad83-88903d8a043e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.640181 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx6s4\" (UniqueName: \"kubernetes.io/projected/df2f0784-f7cb-40ec-8d8b-4ff0b75578aa-kube-api-access-xx6s4\") pod \"cert-manager-5b446d88c5-bvrgw\" (UID: \"df2f0784-f7cb-40ec-8d8b-4ff0b75578aa\") " pod="cert-manager/cert-manager-5b446d88c5-bvrgw" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.640302 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zr9n\" (UniqueName: \"kubernetes.io/projected/2b9b3dfa-9a5c-4267-9198-ebd356b60ced-kube-api-access-6zr9n\") pod \"cert-manager-cainjector-7f985d654d-dttvn\" (UID: \"2b9b3dfa-9a5c-4267-9198-ebd356b60ced\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dttvn" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.656467 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zr9n\" (UniqueName: \"kubernetes.io/projected/2b9b3dfa-9a5c-4267-9198-ebd356b60ced-kube-api-access-6zr9n\") pod \"cert-manager-cainjector-7f985d654d-dttvn\" (UID: \"2b9b3dfa-9a5c-4267-9198-ebd356b60ced\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dttvn" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.740608 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtjr8\" (UniqueName: \"kubernetes.io/projected/da9536a9-4fcd-43a8-ad83-88903d8a043e-kube-api-access-xtjr8\") pod \"cert-manager-webhook-5655c58dd6-g6642\" (UID: \"da9536a9-4fcd-43a8-ad83-88903d8a043e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.740883 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx6s4\" (UniqueName: \"kubernetes.io/projected/df2f0784-f7cb-40ec-8d8b-4ff0b75578aa-kube-api-access-xx6s4\") pod \"cert-manager-5b446d88c5-bvrgw\" (UID: \"df2f0784-f7cb-40ec-8d8b-4ff0b75578aa\") " pod="cert-manager/cert-manager-5b446d88c5-bvrgw" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.754894 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx6s4\" (UniqueName: 
\"kubernetes.io/projected/df2f0784-f7cb-40ec-8d8b-4ff0b75578aa-kube-api-access-xx6s4\") pod \"cert-manager-5b446d88c5-bvrgw\" (UID: \"df2f0784-f7cb-40ec-8d8b-4ff0b75578aa\") " pod="cert-manager/cert-manager-5b446d88c5-bvrgw" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.755241 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtjr8\" (UniqueName: \"kubernetes.io/projected/da9536a9-4fcd-43a8-ad83-88903d8a043e-kube-api-access-xtjr8\") pod \"cert-manager-webhook-5655c58dd6-g6642\" (UID: \"da9536a9-4fcd-43a8-ad83-88903d8a043e\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.849777 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-dttvn" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.858200 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-bvrgw" Dec 05 05:35:57 crc kubenswrapper[4652]: I1205 05:35:57.867204 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" Dec 05 05:35:58 crc kubenswrapper[4652]: I1205 05:35:58.228469 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dttvn"] Dec 05 05:35:58 crc kubenswrapper[4652]: I1205 05:35:58.236238 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 05:35:58 crc kubenswrapper[4652]: I1205 05:35:58.251953 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-bvrgw"] Dec 05 05:35:58 crc kubenswrapper[4652]: W1205 05:35:58.254415 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf2f0784_f7cb_40ec_8d8b_4ff0b75578aa.slice/crio-705fe26d8c986f1bab80ddd34d69a420c56c707de14b9641a386398c140c8fd5 WatchSource:0}: Error finding container 705fe26d8c986f1bab80ddd34d69a420c56c707de14b9641a386398c140c8fd5: Status 404 returned error can't find the container with id 705fe26d8c986f1bab80ddd34d69a420c56c707de14b9641a386398c140c8fd5 Dec 05 05:35:58 crc kubenswrapper[4652]: I1205 05:35:58.285880 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-g6642"] Dec 05 05:35:58 crc kubenswrapper[4652]: W1205 05:35:58.287754 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda9536a9_4fcd_43a8_ad83_88903d8a043e.slice/crio-8612c6d2e9990f094e8b5028ff58d7d8322c102d86cce0af5b3aa39c1947f3ae WatchSource:0}: Error finding container 8612c6d2e9990f094e8b5028ff58d7d8322c102d86cce0af5b3aa39c1947f3ae: Status 404 returned error can't find the container with id 8612c6d2e9990f094e8b5028ff58d7d8322c102d86cce0af5b3aa39c1947f3ae Dec 05 05:35:59 crc kubenswrapper[4652]: I1205 05:35:59.138630 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-dttvn" event={"ID":"2b9b3dfa-9a5c-4267-9198-ebd356b60ced","Type":"ContainerStarted","Data":"63efa9c1ba705b4f3f8e4a8c972e679568070b0d91a87a14631b6bbe43e5eff7"} Dec 05 05:35:59 crc kubenswrapper[4652]: I1205 05:35:59.140071 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-bvrgw" 
event={"ID":"df2f0784-f7cb-40ec-8d8b-4ff0b75578aa","Type":"ContainerStarted","Data":"705fe26d8c986f1bab80ddd34d69a420c56c707de14b9641a386398c140c8fd5"} Dec 05 05:35:59 crc kubenswrapper[4652]: I1205 05:35:59.142064 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" event={"ID":"da9536a9-4fcd-43a8-ad83-88903d8a043e","Type":"ContainerStarted","Data":"8612c6d2e9990f094e8b5028ff58d7d8322c102d86cce0af5b3aa39c1947f3ae"} Dec 05 05:36:01 crc kubenswrapper[4652]: I1205 05:36:01.157172 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-dttvn" event={"ID":"2b9b3dfa-9a5c-4267-9198-ebd356b60ced","Type":"ContainerStarted","Data":"9f60d0c2ccf13875d99c0e888159f633627941f35026ca5b20e330da16916a8b"} Dec 05 05:36:01 crc kubenswrapper[4652]: I1205 05:36:01.162504 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-bvrgw" event={"ID":"df2f0784-f7cb-40ec-8d8b-4ff0b75578aa","Type":"ContainerStarted","Data":"54d425df4428c03d54995c244f603ccb43f49d10a38d38f3a83b77476722d6ce"} Dec 05 05:36:01 crc kubenswrapper[4652]: I1205 05:36:01.165763 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" event={"ID":"da9536a9-4fcd-43a8-ad83-88903d8a043e","Type":"ContainerStarted","Data":"89219071b248005d6db17c6bea1307ad2372c45e0dd577f5b620d3cb48cee630"} Dec 05 05:36:01 crc kubenswrapper[4652]: I1205 05:36:01.166345 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" Dec 05 05:36:01 crc kubenswrapper[4652]: I1205 05:36:01.171531 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-dttvn" podStartSLOduration=2.025195858 podStartE2EDuration="4.171522442s" podCreationTimestamp="2025-12-05 05:35:57 +0000 UTC" firstStartedPulling="2025-12-05 05:35:58.236027273 +0000 UTC m=+560.472757540" lastFinishedPulling="2025-12-05 05:36:00.382353856 +0000 UTC m=+562.619084124" observedRunningTime="2025-12-05 05:36:01.170525317 +0000 UTC m=+563.407255584" watchObservedRunningTime="2025-12-05 05:36:01.171522442 +0000 UTC m=+563.408252709" Dec 05 05:36:01 crc kubenswrapper[4652]: I1205 05:36:01.181055 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" podStartSLOduration=2.083402052 podStartE2EDuration="4.181047348s" podCreationTimestamp="2025-12-05 05:35:57 +0000 UTC" firstStartedPulling="2025-12-05 05:35:58.289471108 +0000 UTC m=+560.526201375" lastFinishedPulling="2025-12-05 05:36:00.387116414 +0000 UTC m=+562.623846671" observedRunningTime="2025-12-05 05:36:01.180499588 +0000 UTC m=+563.417229854" watchObservedRunningTime="2025-12-05 05:36:01.181047348 +0000 UTC m=+563.417777615" Dec 05 05:36:01 crc kubenswrapper[4652]: I1205 05:36:01.193625 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-bvrgw" podStartSLOduration=1.612340459 podStartE2EDuration="4.19360574s" podCreationTimestamp="2025-12-05 05:35:57 +0000 UTC" firstStartedPulling="2025-12-05 05:35:58.266325783 +0000 UTC m=+560.503056050" lastFinishedPulling="2025-12-05 05:36:00.847591064 +0000 UTC m=+563.084321331" observedRunningTime="2025-12-05 05:36:01.190668936 +0000 UTC m=+563.427399203" watchObservedRunningTime="2025-12-05 05:36:01.19360574 +0000 UTC m=+563.430336007" Dec 05 05:36:04 
crc kubenswrapper[4652]: I1205 05:36:04.150497 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:36:04 crc kubenswrapper[4652]: I1205 05:36:04.150950 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:36:04 crc kubenswrapper[4652]: I1205 05:36:04.151001 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:36:04 crc kubenswrapper[4652]: I1205 05:36:04.151387 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7e133200e3f39bb8a8ffb76a76a2e5bfeff1444170459551c8b4e9c460f0294f"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 05:36:04 crc kubenswrapper[4652]: I1205 05:36:04.151456 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://7e133200e3f39bb8a8ffb76a76a2e5bfeff1444170459551c8b4e9c460f0294f" gracePeriod=600 Dec 05 05:36:05 crc kubenswrapper[4652]: I1205 05:36:05.192869 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="7e133200e3f39bb8a8ffb76a76a2e5bfeff1444170459551c8b4e9c460f0294f" exitCode=0 Dec 05 05:36:05 crc kubenswrapper[4652]: I1205 05:36:05.193145 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"7e133200e3f39bb8a8ffb76a76a2e5bfeff1444170459551c8b4e9c460f0294f"} Dec 05 05:36:05 crc kubenswrapper[4652]: I1205 05:36:05.193267 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"d6fbad51d9fa83b6ff2e59f87dc8d974ebb1fcdb8dee5f5d5ecbd852c4da84af"} Dec 05 05:36:05 crc kubenswrapper[4652]: I1205 05:36:05.193300 4652 scope.go:117] "RemoveContainer" containerID="4e90520058f578e3fe086566cbaa2b220e39ddf1f77b6c161c8e55a7db53ca5a" Dec 05 05:36:07 crc kubenswrapper[4652]: I1205 05:36:07.870642 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-g6642" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.066650 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-94kb9"] Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.067006 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovn-controller" 
containerID="cri-o://fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf" gracePeriod=30 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.067138 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kube-rbac-proxy-node" containerID="cri-o://44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13" gracePeriod=30 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.067104 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="nbdb" containerID="cri-o://eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d" gracePeriod=30 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.067184 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovn-acl-logging" containerID="cri-o://249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659" gracePeriod=30 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.067206 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="northd" containerID="cri-o://97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764" gracePeriod=30 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.067226 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="sbdb" containerID="cri-o://53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db" gracePeriod=30 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.067140 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d" gracePeriod=30 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.096903 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" containerID="cri-o://f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d" gracePeriod=30 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.219444 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/2.log" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.220667 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/1.log" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.220730 4652 generic.go:334] "Generic (PLEG): container finished" podID="57ea6288-d271-498d-ad7e-aa90f3d433e4" containerID="75fb6a2ee0d7dd0a6cf678e06118f8ae72f0a5ec41174418948a48ba3fbb15c1" exitCode=2 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.220819 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-569vn" 
event={"ID":"57ea6288-d271-498d-ad7e-aa90f3d433e4","Type":"ContainerDied","Data":"75fb6a2ee0d7dd0a6cf678e06118f8ae72f0a5ec41174418948a48ba3fbb15c1"} Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.220899 4652 scope.go:117] "RemoveContainer" containerID="282eba00523004a165c05306bed24733a30945736ad66c80cf9e839abbd1c7c2" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.221539 4652 scope.go:117] "RemoveContainer" containerID="75fb6a2ee0d7dd0a6cf678e06118f8ae72f0a5ec41174418948a48ba3fbb15c1" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.221769 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-569vn_openshift-multus(57ea6288-d271-498d-ad7e-aa90f3d433e4)\"" pod="openshift-multus/multus-569vn" podUID="57ea6288-d271-498d-ad7e-aa90f3d433e4" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.224269 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovnkube-controller/3.log" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.227262 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovn-acl-logging/0.log" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.227738 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovn-controller/0.log" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228254 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d" exitCode=0 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228281 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d" exitCode=0 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228291 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13" exitCode=0 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228299 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659" exitCode=143 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228306 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf" exitCode=143 Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228334 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d"} Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228367 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d"} Dec 05 05:36:09 crc 
kubenswrapper[4652]: I1205 05:36:09.228379 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13"} Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228388 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659"} Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.228398 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf"} Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.259066 4652 scope.go:117] "RemoveContainer" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.341779 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e\": container with ID starting with c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e not found: ID does not exist" containerID="c7466325f60047644f575bd15b1207334d335d56c6733508a67386060f4ea46e" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.344437 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovn-acl-logging/0.log" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.346387 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovn-controller/0.log" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.347025 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.397843 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rbl4b"] Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398094 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398114 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398130 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovn-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398137 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovn-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398145 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovn-acl-logging" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398154 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovn-acl-logging" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398162 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="sbdb" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398169 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="sbdb" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398182 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kubecfg-setup" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398188 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kubecfg-setup" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398198 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398203 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398211 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kube-rbac-proxy-node" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398217 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kube-rbac-proxy-node" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398225 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="nbdb" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398231 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="nbdb" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398238 4652 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="northd" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398243 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="northd" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398252 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398257 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398264 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398269 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398276 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398281 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398381 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovn-acl-logging" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398390 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="sbdb" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398398 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kube-rbac-proxy-node" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398405 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398413 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398420 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovn-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398428 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398434 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="nbdb" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398442 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="northd" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398449 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398456 4652 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: E1205 05:36:09.398580 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398590 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.398689 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" containerName="ovnkube-controller" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.400284 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480112 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-script-lib\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480161 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-openvswitch\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480184 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-ovn\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480212 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-systemd-units\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480230 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-etc-openvswitch\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480244 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-node-log\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480279 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-bin\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480372 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480439 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480619 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-kubelet\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480654 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480650 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480683 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480706 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480710 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480715 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480654 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-node-log" (OuterVolumeSpecName: "node-log") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480761 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480768 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-config\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480928 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvnhn\" (UniqueName: \"kubernetes.io/projected/ab3e4ec7-1775-48b7-8848-a578578629df-kube-api-access-nvnhn\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480967 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ab3e4ec7-1775-48b7-8848-a578578629df-ovn-node-metrics-cert\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.480992 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-log-socket\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481038 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-slash\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481051 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481057 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-var-lib-openvswitch\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481078 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481095 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-env-overrides\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481102 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-log-socket" (OuterVolumeSpecName: "log-socket") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481122 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-slash" (OuterVolumeSpecName: "host-slash") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481131 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-netd\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481156 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-ovn-kubernetes\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481172 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-systemd\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481187 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-netns\") pod \"ab3e4ec7-1775-48b7-8848-a578578629df\" (UID: \"ab3e4ec7-1775-48b7-8848-a578578629df\") " Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481282 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-cni-netd\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481307 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovnkube-script-lib\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481324 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovn-node-metrics-cert\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481342 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-etc-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481366 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-cni-bin\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: 
I1205 05:36:09.481388 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovnkube-config\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481396 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481405 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm8j5\" (UniqueName: \"kubernetes.io/projected/becd83a8-bfc5-479b-9613-e584f5b17a7f-kube-api-access-bm8j5\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481426 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-systemd-units\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481449 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481463 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-kubelet\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481478 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-systemd\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481504 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-var-lib-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481525 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481541 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-slash\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481573 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-run-ovn-kubernetes\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481592 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-ovn\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481428 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481643 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481759 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481789 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-log-socket\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481891 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-env-overrides\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481928 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-node-log\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.481960 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-run-netns\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482041 4652 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482055 4652 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482065 4652 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482074 4652 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482082 4652 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482090 4652 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482100 4652 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc 
kubenswrapper[4652]: I1205 05:36:09.482107 4652 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482115 4652 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482123 4652 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482131 4652 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482139 4652 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482147 4652 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482155 4652 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ab3e4ec7-1775-48b7-8848-a578578629df-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482163 4652 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482170 4652 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.482178 4652 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.486233 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab3e4ec7-1775-48b7-8848-a578578629df-kube-api-access-nvnhn" (OuterVolumeSpecName: "kube-api-access-nvnhn") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "kube-api-access-nvnhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.486515 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab3e4ec7-1775-48b7-8848-a578578629df-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.492834 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "ab3e4ec7-1775-48b7-8848-a578578629df" (UID: "ab3e4ec7-1775-48b7-8848-a578578629df"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582452 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-env-overrides\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582499 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-node-log\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582521 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-run-netns\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582543 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-cni-netd\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582585 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovnkube-script-lib\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582601 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovn-node-metrics-cert\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582622 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-etc-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582646 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-cni-bin\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582641 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-node-log\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582665 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-systemd-units\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582673 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-cni-netd\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582697 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-run-netns\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582737 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-cni-bin\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582680 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovnkube-config\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582717 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-etc-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582773 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bm8j5\" (UniqueName: \"kubernetes.io/projected/becd83a8-bfc5-479b-9613-e584f5b17a7f-kube-api-access-bm8j5\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582802 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-systemd-units\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582806 4652 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582827 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-kubelet\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582841 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-systemd\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582858 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-var-lib-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582884 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582899 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-slash\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582916 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-run-ovn-kubernetes\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582937 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-ovn\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.582977 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-log-socket\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583033 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvnhn\" 
(UniqueName: \"kubernetes.io/projected/ab3e4ec7-1775-48b7-8848-a578578629df-kube-api-access-nvnhn\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583044 4652 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ab3e4ec7-1775-48b7-8848-a578578629df-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583054 4652 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ab3e4ec7-1775-48b7-8848-a578578629df-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583077 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-log-socket\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583101 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583121 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-slash\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583124 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583141 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-run-ovn-kubernetes\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583163 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-ovn\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583165 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-host-kubelet\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583180 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-run-systemd\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583193 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/becd83a8-bfc5-479b-9613-e584f5b17a7f-var-lib-openvswitch\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583272 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-env-overrides\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583384 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovnkube-script-lib\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.583398 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovnkube-config\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.585649 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/becd83a8-bfc5-479b-9613-e584f5b17a7f-ovn-node-metrics-cert\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.595658 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm8j5\" (UniqueName: \"kubernetes.io/projected/becd83a8-bfc5-479b-9613-e584f5b17a7f-kube-api-access-bm8j5\") pod \"ovnkube-node-rbl4b\" (UID: \"becd83a8-bfc5-479b-9613-e584f5b17a7f\") " pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:09 crc kubenswrapper[4652]: I1205 05:36:09.713039 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.235150 4652 generic.go:334] "Generic (PLEG): container finished" podID="becd83a8-bfc5-479b-9613-e584f5b17a7f" containerID="6aeba465f1179f802098b22fa55b3c54191f356d3cef4875572053849b8d570e" exitCode=0 Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.235255 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerDied","Data":"6aeba465f1179f802098b22fa55b3c54191f356d3cef4875572053849b8d570e"} Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.235325 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"b84f96da7969f9b3940941f127be18c471b5ea08d4129a6259c240f08d56b643"} Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.236804 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/2.log" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.243130 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovn-acl-logging/0.log" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.243855 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-94kb9_ab3e4ec7-1775-48b7-8848-a578578629df/ovn-controller/0.log" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244289 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db" exitCode=0 Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244317 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d" exitCode=0 Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244329 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab3e4ec7-1775-48b7-8848-a578578629df" containerID="97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764" exitCode=0 Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244364 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db"} Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244393 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d"} Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244405 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764"} Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244409 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244435 4652 scope.go:117] "RemoveContainer" containerID="f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.244415 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-94kb9" event={"ID":"ab3e4ec7-1775-48b7-8848-a578578629df","Type":"ContainerDied","Data":"45151eb2c12f06db353156ae9ada8e149602f0efe070c7256168fc8f9b25b890"} Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.264509 4652 scope.go:117] "RemoveContainer" containerID="53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.278762 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-94kb9"] Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.283149 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-94kb9"] Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.289351 4652 scope.go:117] "RemoveContainer" containerID="eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.301867 4652 scope.go:117] "RemoveContainer" containerID="97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.313459 4652 scope.go:117] "RemoveContainer" containerID="1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.323852 4652 scope.go:117] "RemoveContainer" containerID="44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.335792 4652 scope.go:117] "RemoveContainer" containerID="249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.350727 4652 scope.go:117] "RemoveContainer" containerID="fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.373096 4652 scope.go:117] "RemoveContainer" containerID="d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.388283 4652 scope.go:117] "RemoveContainer" containerID="f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.388659 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d\": container with ID starting with f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d not found: ID does not exist" containerID="f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.388758 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d"} err="failed to get container status \"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d\": rpc error: code = NotFound desc = could not find container \"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d\": container with ID starting with f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d not 
found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.388803 4652 scope.go:117] "RemoveContainer" containerID="53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.389325 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\": container with ID starting with 53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db not found: ID does not exist" containerID="53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.389381 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db"} err="failed to get container status \"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\": rpc error: code = NotFound desc = could not find container \"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\": container with ID starting with 53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.389425 4652 scope.go:117] "RemoveContainer" containerID="eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.389780 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\": container with ID starting with eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d not found: ID does not exist" containerID="eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.389818 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d"} err="failed to get container status \"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\": rpc error: code = NotFound desc = could not find container \"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\": container with ID starting with eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.389844 4652 scope.go:117] "RemoveContainer" containerID="97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.390095 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\": container with ID starting with 97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764 not found: ID does not exist" containerID="97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.390127 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764"} err="failed to get container status \"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\": rpc error: code = NotFound desc = could not find container 
\"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\": container with ID starting with 97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.390179 4652 scope.go:117] "RemoveContainer" containerID="1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.391862 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\": container with ID starting with 1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d not found: ID does not exist" containerID="1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.392360 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d"} err="failed to get container status \"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\": rpc error: code = NotFound desc = could not find container \"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\": container with ID starting with 1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.392391 4652 scope.go:117] "RemoveContainer" containerID="44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.397187 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\": container with ID starting with 44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13 not found: ID does not exist" containerID="44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.397222 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13"} err="failed to get container status \"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\": rpc error: code = NotFound desc = could not find container \"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\": container with ID starting with 44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.397244 4652 scope.go:117] "RemoveContainer" containerID="249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.398330 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\": container with ID starting with 249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659 not found: ID does not exist" containerID="249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.398378 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659"} 
err="failed to get container status \"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\": rpc error: code = NotFound desc = could not find container \"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\": container with ID starting with 249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.398414 4652 scope.go:117] "RemoveContainer" containerID="fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.399339 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\": container with ID starting with fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf not found: ID does not exist" containerID="fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.399388 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf"} err="failed to get container status \"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\": rpc error: code = NotFound desc = could not find container \"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\": container with ID starting with fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.399414 4652 scope.go:117] "RemoveContainer" containerID="d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae" Dec 05 05:36:10 crc kubenswrapper[4652]: E1205 05:36:10.400208 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\": container with ID starting with d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae not found: ID does not exist" containerID="d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.400261 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae"} err="failed to get container status \"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\": rpc error: code = NotFound desc = could not find container \"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\": container with ID starting with d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.400285 4652 scope.go:117] "RemoveContainer" containerID="f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.400524 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d"} err="failed to get container status \"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d\": rpc error: code = NotFound desc = could not find container \"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d\": container with ID starting with 
f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.400572 4652 scope.go:117] "RemoveContainer" containerID="53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.400846 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db"} err="failed to get container status \"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\": rpc error: code = NotFound desc = could not find container \"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\": container with ID starting with 53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.400881 4652 scope.go:117] "RemoveContainer" containerID="eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.401242 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d"} err="failed to get container status \"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\": rpc error: code = NotFound desc = could not find container \"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\": container with ID starting with eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.401274 4652 scope.go:117] "RemoveContainer" containerID="97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.401618 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764"} err="failed to get container status \"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\": rpc error: code = NotFound desc = could not find container \"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\": container with ID starting with 97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.401640 4652 scope.go:117] "RemoveContainer" containerID="1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.401985 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d"} err="failed to get container status \"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\": rpc error: code = NotFound desc = could not find container \"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\": container with ID starting with 1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.402010 4652 scope.go:117] "RemoveContainer" containerID="44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.402293 4652 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13"} err="failed to get container status \"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\": rpc error: code = NotFound desc = could not find container \"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\": container with ID starting with 44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.402317 4652 scope.go:117] "RemoveContainer" containerID="249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.402766 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659"} err="failed to get container status \"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\": rpc error: code = NotFound desc = could not find container \"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\": container with ID starting with 249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.402789 4652 scope.go:117] "RemoveContainer" containerID="fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.403110 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf"} err="failed to get container status \"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\": rpc error: code = NotFound desc = could not find container \"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\": container with ID starting with fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.403130 4652 scope.go:117] "RemoveContainer" containerID="d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.403450 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae"} err="failed to get container status \"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\": rpc error: code = NotFound desc = could not find container \"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\": container with ID starting with d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.403476 4652 scope.go:117] "RemoveContainer" containerID="f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.403847 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d"} err="failed to get container status \"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d\": rpc error: code = NotFound desc = could not find container \"f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d\": container with ID starting with f057c406fca279229e03b7c649d20dd5096e86e192f1cf87c51e22c303d4bc6d not found: ID does not exist" Dec 
05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.403871 4652 scope.go:117] "RemoveContainer" containerID="53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.404144 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db"} err="failed to get container status \"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\": rpc error: code = NotFound desc = could not find container \"53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db\": container with ID starting with 53737686a3743a833331dffd49a46c3fb74723e90b1009186e6ae075a51aa1db not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.404161 4652 scope.go:117] "RemoveContainer" containerID="eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.404461 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d"} err="failed to get container status \"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\": rpc error: code = NotFound desc = could not find container \"eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d\": container with ID starting with eac94fcc755c528971ab5135d5d07e4bcb6a50a25a1a6d88769265d07cfe935d not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.404478 4652 scope.go:117] "RemoveContainer" containerID="97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.404827 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764"} err="failed to get container status \"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\": rpc error: code = NotFound desc = could not find container \"97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764\": container with ID starting with 97bfd934ac6d3ce5f4cf6ed7327bf91d551685b9eceb1e77510e0d750b60e764 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.404854 4652 scope.go:117] "RemoveContainer" containerID="1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.405741 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d"} err="failed to get container status \"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\": rpc error: code = NotFound desc = could not find container \"1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d\": container with ID starting with 1ae925b889123f7ab5ee679e4d179a236fb0336f406213c2b575488442986a4d not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.405780 4652 scope.go:117] "RemoveContainer" containerID="44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.406357 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13"} err="failed to get container status 
\"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\": rpc error: code = NotFound desc = could not find container \"44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13\": container with ID starting with 44303293dd72d72fd2b6f5b80907506f78700e637335c7fb935d4fb548924a13 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.406602 4652 scope.go:117] "RemoveContainer" containerID="249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.406904 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659"} err="failed to get container status \"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\": rpc error: code = NotFound desc = could not find container \"249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659\": container with ID starting with 249efa8253cd06e29346fa48c8612f37a8427594924ef3d466afb40cbbf94659 not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.406924 4652 scope.go:117] "RemoveContainer" containerID="fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.407300 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf"} err="failed to get container status \"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\": rpc error: code = NotFound desc = could not find container \"fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf\": container with ID starting with fde3085ba13be64c27efbb77e4bd43e15b8520b010d6173c997e07e63237accf not found: ID does not exist" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.407322 4652 scope.go:117] "RemoveContainer" containerID="d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae" Dec 05 05:36:10 crc kubenswrapper[4652]: I1205 05:36:10.407594 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae"} err="failed to get container status \"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\": rpc error: code = NotFound desc = could not find container \"d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae\": container with ID starting with d5b8c95a4edfe23261eea5abaa32ebee2ede7ecd670baa3b0d9c61f187805fae not found: ID does not exist" Dec 05 05:36:11 crc kubenswrapper[4652]: I1205 05:36:11.254633 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"d078f13dc53fe1d6295de697be009c4b241aa0f4b636371fef261cbd05aca4ae"} Dec 05 05:36:11 crc kubenswrapper[4652]: I1205 05:36:11.255000 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"93df4651d5d5a776d1e3559486bb35907e9447f486334a821ad06b2b70ccebc3"} Dec 05 05:36:11 crc kubenswrapper[4652]: I1205 05:36:11.255011 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" 
event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"ae283c8bed51dcea2b2c3e84702b1d01bfef0013ef6e25812b1cad89ba014cf5"} Dec 05 05:36:11 crc kubenswrapper[4652]: I1205 05:36:11.255021 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"32170d4ea3a96c90b2fbfbb1f620cdcf065bde4e5f73af34afdbb4d2031e139c"} Dec 05 05:36:11 crc kubenswrapper[4652]: I1205 05:36:11.255030 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"3c6678ae2db8470320a319053e80e5fa137a9020e8166515aaef0bb7bb0224bb"} Dec 05 05:36:11 crc kubenswrapper[4652]: I1205 05:36:11.255037 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"e3f59346129358396484c000574c42cd9f8eac687826e3d63045db9ac89b05ed"} Dec 05 05:36:12 crc kubenswrapper[4652]: I1205 05:36:12.131333 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab3e4ec7-1775-48b7-8848-a578578629df" path="/var/lib/kubelet/pods/ab3e4ec7-1775-48b7-8848-a578578629df/volumes" Dec 05 05:36:13 crc kubenswrapper[4652]: I1205 05:36:13.267978 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"212410d7b0fba137b59151b18bd6bcb90f95cec8d8816773ba888768bd9ac7b4"} Dec 05 05:36:15 crc kubenswrapper[4652]: I1205 05:36:15.280485 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" event={"ID":"becd83a8-bfc5-479b-9613-e584f5b17a7f","Type":"ContainerStarted","Data":"57786147e14cf3faaafe09d46902d6c4a7900dd5a5a238dbe9fe5249cd75a319"} Dec 05 05:36:15 crc kubenswrapper[4652]: I1205 05:36:15.280797 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:15 crc kubenswrapper[4652]: I1205 05:36:15.280811 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:15 crc kubenswrapper[4652]: I1205 05:36:15.280820 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:15 crc kubenswrapper[4652]: I1205 05:36:15.304884 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" podStartSLOduration=6.304854467 podStartE2EDuration="6.304854467s" podCreationTimestamp="2025-12-05 05:36:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:36:15.302810241 +0000 UTC m=+577.539540508" watchObservedRunningTime="2025-12-05 05:36:15.304854467 +0000 UTC m=+577.541584733" Dec 05 05:36:15 crc kubenswrapper[4652]: I1205 05:36:15.305572 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:15 crc kubenswrapper[4652]: I1205 05:36:15.305909 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:23 crc kubenswrapper[4652]: I1205 
05:36:23.125605 4652 scope.go:117] "RemoveContainer" containerID="75fb6a2ee0d7dd0a6cf678e06118f8ae72f0a5ec41174418948a48ba3fbb15c1" Dec 05 05:36:23 crc kubenswrapper[4652]: E1205 05:36:23.126476 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-569vn_openshift-multus(57ea6288-d271-498d-ad7e-aa90f3d433e4)\"" pod="openshift-multus/multus-569vn" podUID="57ea6288-d271-498d-ad7e-aa90f3d433e4" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.014620 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468"] Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.015927 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.017718 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.021392 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468"] Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.123801 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.124006 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4r2t\" (UniqueName: \"kubernetes.io/projected/d005ccd7-aab2-4b15-a71d-fda77a89070b-kube-api-access-c4r2t\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.124094 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.224726 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4r2t\" (UniqueName: \"kubernetes.io/projected/d005ccd7-aab2-4b15-a71d-fda77a89070b-kube-api-access-c4r2t\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.224787 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-bundle\") pod 
\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.224915 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.225388 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.225419 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.243806 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4r2t\" (UniqueName: \"kubernetes.io/projected/d005ccd7-aab2-4b15-a71d-fda77a89070b-kube-api-access-c4r2t\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.329879 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: E1205 05:36:30.352749 4652 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace_d005ccd7-aab2-4b15-a71d-fda77a89070b_0(3cfe8d4fd329d730cb3b8aae1d7dfac0340cc83368a94447f4d8425128dd94be): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 05:36:30 crc kubenswrapper[4652]: E1205 05:36:30.352822 4652 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace_d005ccd7-aab2-4b15-a71d-fda77a89070b_0(3cfe8d4fd329d730cb3b8aae1d7dfac0340cc83368a94447f4d8425128dd94be): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: E1205 05:36:30.352851 4652 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace_d005ccd7-aab2-4b15-a71d-fda77a89070b_0(3cfe8d4fd329d730cb3b8aae1d7dfac0340cc83368a94447f4d8425128dd94be): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: E1205 05:36:30.352915 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace(d005ccd7-aab2-4b15-a71d-fda77a89070b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace(d005ccd7-aab2-4b15-a71d-fda77a89070b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace_d005ccd7-aab2-4b15-a71d-fda77a89070b_0(3cfe8d4fd329d730cb3b8aae1d7dfac0340cc83368a94447f4d8425128dd94be): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.357398 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: I1205 05:36:30.357811 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: E1205 05:36:30.375056 4652 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace_d005ccd7-aab2-4b15-a71d-fda77a89070b_0(46d7c4ab0a27041afffbc431d88aaefc94518e870839b5db6587ffefd8960b7b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 05:36:30 crc kubenswrapper[4652]: E1205 05:36:30.375129 4652 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace_d005ccd7-aab2-4b15-a71d-fda77a89070b_0(46d7c4ab0a27041afffbc431d88aaefc94518e870839b5db6587ffefd8960b7b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: E1205 05:36:30.375160 4652 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace_d005ccd7-aab2-4b15-a71d-fda77a89070b_0(46d7c4ab0a27041afffbc431d88aaefc94518e870839b5db6587ffefd8960b7b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:30 crc kubenswrapper[4652]: E1205 05:36:30.375230 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace(d005ccd7-aab2-4b15-a71d-fda77a89070b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace(d005ccd7-aab2-4b15-a71d-fda77a89070b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_openshift-marketplace_d005ccd7-aab2-4b15-a71d-fda77a89070b_0(46d7c4ab0a27041afffbc431d88aaefc94518e870839b5db6587ffefd8960b7b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" Dec 05 05:36:37 crc kubenswrapper[4652]: I1205 05:36:37.126133 4652 scope.go:117] "RemoveContainer" containerID="75fb6a2ee0d7dd0a6cf678e06118f8ae72f0a5ec41174418948a48ba3fbb15c1" Dec 05 05:36:37 crc kubenswrapper[4652]: I1205 05:36:37.393445 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-569vn_57ea6288-d271-498d-ad7e-aa90f3d433e4/kube-multus/2.log" Dec 05 05:36:37 crc kubenswrapper[4652]: I1205 05:36:37.393682 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-569vn" event={"ID":"57ea6288-d271-498d-ad7e-aa90f3d433e4","Type":"ContainerStarted","Data":"294b374dfd4cce25276f4352ecaff80333625f5a90b270021384ad26a1f2084b"} Dec 05 05:36:39 crc kubenswrapper[4652]: I1205 05:36:39.732263 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rbl4b" Dec 05 05:36:45 crc kubenswrapper[4652]: I1205 05:36:45.124691 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:45 crc kubenswrapper[4652]: I1205 05:36:45.125264 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:45 crc kubenswrapper[4652]: I1205 05:36:45.473052 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468"] Dec 05 05:36:46 crc kubenswrapper[4652]: I1205 05:36:46.439040 4652 generic.go:334] "Generic (PLEG): container finished" podID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerID="782ff6ea59bec599589dca61c2d85e077f9e6e0c62e41cf13dab497df8b0f3fd" exitCode=0 Dec 05 05:36:46 crc kubenswrapper[4652]: I1205 05:36:46.439255 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" event={"ID":"d005ccd7-aab2-4b15-a71d-fda77a89070b","Type":"ContainerDied","Data":"782ff6ea59bec599589dca61c2d85e077f9e6e0c62e41cf13dab497df8b0f3fd"} Dec 05 05:36:46 crc kubenswrapper[4652]: I1205 05:36:46.439493 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" event={"ID":"d005ccd7-aab2-4b15-a71d-fda77a89070b","Type":"ContainerStarted","Data":"ba89f64668d99e4495ee50325da24fca515bfc61d5319105d0e957f78d83bc8f"} Dec 05 05:36:48 crc kubenswrapper[4652]: I1205 05:36:48.452480 4652 generic.go:334] "Generic (PLEG): container finished" podID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerID="644efe7c5ba9af5b0e6eb148130f55d90916ff873a4483f8767f0acf6dd728a9" exitCode=0 Dec 05 05:36:48 crc kubenswrapper[4652]: I1205 05:36:48.452577 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" event={"ID":"d005ccd7-aab2-4b15-a71d-fda77a89070b","Type":"ContainerDied","Data":"644efe7c5ba9af5b0e6eb148130f55d90916ff873a4483f8767f0acf6dd728a9"} Dec 05 05:36:49 crc kubenswrapper[4652]: I1205 05:36:49.461038 4652 generic.go:334] "Generic (PLEG): container finished" podID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerID="449c9d6aca1ddadf27a56fd8c7fe38735ab85473e0b5fbfc5bf89866e89b0cdc" exitCode=0 Dec 05 05:36:49 crc kubenswrapper[4652]: I1205 05:36:49.461110 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" event={"ID":"d005ccd7-aab2-4b15-a71d-fda77a89070b","Type":"ContainerDied","Data":"449c9d6aca1ddadf27a56fd8c7fe38735ab85473e0b5fbfc5bf89866e89b0cdc"} Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.650574 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.651912 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4r2t\" (UniqueName: \"kubernetes.io/projected/d005ccd7-aab2-4b15-a71d-fda77a89070b-kube-api-access-c4r2t\") pod \"d005ccd7-aab2-4b15-a71d-fda77a89070b\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.651942 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-util\") pod \"d005ccd7-aab2-4b15-a71d-fda77a89070b\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.651968 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-bundle\") pod \"d005ccd7-aab2-4b15-a71d-fda77a89070b\" (UID: \"d005ccd7-aab2-4b15-a71d-fda77a89070b\") " Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.654722 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-bundle" (OuterVolumeSpecName: "bundle") pod "d005ccd7-aab2-4b15-a71d-fda77a89070b" (UID: "d005ccd7-aab2-4b15-a71d-fda77a89070b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.657075 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d005ccd7-aab2-4b15-a71d-fda77a89070b-kube-api-access-c4r2t" (OuterVolumeSpecName: "kube-api-access-c4r2t") pod "d005ccd7-aab2-4b15-a71d-fda77a89070b" (UID: "d005ccd7-aab2-4b15-a71d-fda77a89070b"). InnerVolumeSpecName "kube-api-access-c4r2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.662612 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-util" (OuterVolumeSpecName: "util") pod "d005ccd7-aab2-4b15-a71d-fda77a89070b" (UID: "d005ccd7-aab2-4b15-a71d-fda77a89070b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.753220 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4r2t\" (UniqueName: \"kubernetes.io/projected/d005ccd7-aab2-4b15-a71d-fda77a89070b-kube-api-access-c4r2t\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.753280 4652 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-util\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:50 crc kubenswrapper[4652]: I1205 05:36:50.753292 4652 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d005ccd7-aab2-4b15-a71d-fda77a89070b-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:36:51 crc kubenswrapper[4652]: I1205 05:36:51.473585 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" event={"ID":"d005ccd7-aab2-4b15-a71d-fda77a89070b","Type":"ContainerDied","Data":"ba89f64668d99e4495ee50325da24fca515bfc61d5319105d0e957f78d83bc8f"} Dec 05 05:36:51 crc kubenswrapper[4652]: I1205 05:36:51.473631 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba89f64668d99e4495ee50325da24fca515bfc61d5319105d0e957f78d83bc8f" Dec 05 05:36:51 crc kubenswrapper[4652]: I1205 05:36:51.473902 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.412260 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt"] Dec 05 05:37:01 crc kubenswrapper[4652]: E1205 05:37:01.413047 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerName="util" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.413061 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerName="util" Dec 05 05:37:01 crc kubenswrapper[4652]: E1205 05:37:01.413074 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerName="extract" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.413079 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerName="extract" Dec 05 05:37:01 crc kubenswrapper[4652]: E1205 05:37:01.413098 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerName="pull" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.413106 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerName="pull" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.413204 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d005ccd7-aab2-4b15-a71d-fda77a89070b" containerName="extract" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.413601 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.420670 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-2lrd2" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.420934 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.420970 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.429607 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.463609 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.464346 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.470360 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-bw9jd" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.470591 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.484278 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.484846 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.484922 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.497505 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.576699 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/720842c0-3317-47df-a8b0-99380921e48d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr\" (UID: \"720842c0-3317-47df-a8b0-99380921e48d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.576753 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/720842c0-3317-47df-a8b0-99380921e48d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr\" (UID: \"720842c0-3317-47df-a8b0-99380921e48d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.576938 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55ctg\" (UniqueName: \"kubernetes.io/projected/3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a-kube-api-access-55ctg\") pod \"obo-prometheus-operator-668cf9dfbb-rnzbt\" (UID: \"3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.639734 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7lgmr"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.640689 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.643311 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-6lcth" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.649857 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.654592 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7lgmr"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.678724 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/720842c0-3317-47df-a8b0-99380921e48d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr\" (UID: \"720842c0-3317-47df-a8b0-99380921e48d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.678772 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/720842c0-3317-47df-a8b0-99380921e48d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr\" (UID: \"720842c0-3317-47df-a8b0-99380921e48d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.678803 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d9eec5b9-7356-430d-b54a-53451f7eeeb6-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6\" (UID: \"d9eec5b9-7356-430d-b54a-53451f7eeeb6\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.678845 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69ltw\" (UniqueName: \"kubernetes.io/projected/4c7f2023-f868-4f76-a833-44988a07307d-kube-api-access-69ltw\") pod \"observability-operator-d8bb48f5d-7lgmr\" (UID: \"4c7f2023-f868-4f76-a833-44988a07307d\") " pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.678898 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55ctg\" (UniqueName: \"kubernetes.io/projected/3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a-kube-api-access-55ctg\") pod \"obo-prometheus-operator-668cf9dfbb-rnzbt\" (UID: \"3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.678929 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/4c7f2023-f868-4f76-a833-44988a07307d-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7lgmr\" (UID: \"4c7f2023-f868-4f76-a833-44988a07307d\") " pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.678979 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d9eec5b9-7356-430d-b54a-53451f7eeeb6-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6\" (UID: \"d9eec5b9-7356-430d-b54a-53451f7eeeb6\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.685084 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/720842c0-3317-47df-a8b0-99380921e48d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr\" (UID: \"720842c0-3317-47df-a8b0-99380921e48d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.688312 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/720842c0-3317-47df-a8b0-99380921e48d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr\" (UID: \"720842c0-3317-47df-a8b0-99380921e48d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.698109 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55ctg\" (UniqueName: \"kubernetes.io/projected/3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a-kube-api-access-55ctg\") pod \"obo-prometheus-operator-668cf9dfbb-rnzbt\" (UID: \"3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.751063 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.779795 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d9eec5b9-7356-430d-b54a-53451f7eeeb6-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6\" (UID: \"d9eec5b9-7356-430d-b54a-53451f7eeeb6\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.780060 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d9eec5b9-7356-430d-b54a-53451f7eeeb6-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6\" (UID: \"d9eec5b9-7356-430d-b54a-53451f7eeeb6\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.780087 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69ltw\" (UniqueName: \"kubernetes.io/projected/4c7f2023-f868-4f76-a833-44988a07307d-kube-api-access-69ltw\") pod \"observability-operator-d8bb48f5d-7lgmr\" (UID: \"4c7f2023-f868-4f76-a833-44988a07307d\") " pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.780118 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/4c7f2023-f868-4f76-a833-44988a07307d-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7lgmr\" (UID: 
\"4c7f2023-f868-4f76-a833-44988a07307d\") " pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.781399 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.784339 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/4c7f2023-f868-4f76-a833-44988a07307d-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7lgmr\" (UID: \"4c7f2023-f868-4f76-a833-44988a07307d\") " pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.784431 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d9eec5b9-7356-430d-b54a-53451f7eeeb6-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6\" (UID: \"d9eec5b9-7356-430d-b54a-53451f7eeeb6\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.784678 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d9eec5b9-7356-430d-b54a-53451f7eeeb6-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6\" (UID: \"d9eec5b9-7356-430d-b54a-53451f7eeeb6\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.793628 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69ltw\" (UniqueName: \"kubernetes.io/projected/4c7f2023-f868-4f76-a833-44988a07307d-kube-api-access-69ltw\") pod \"observability-operator-d8bb48f5d-7lgmr\" (UID: \"4c7f2023-f868-4f76-a833-44988a07307d\") " pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.808947 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.829674 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hgff6"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.830570 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.835217 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-fx9bl" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.841746 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hgff6"] Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.952956 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.987245 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq6mn\" (UniqueName: \"kubernetes.io/projected/b8cbd621-1925-4df5-8a63-78c0cc735339-kube-api-access-sq6mn\") pod \"perses-operator-5446b9c989-hgff6\" (UID: \"b8cbd621-1925-4df5-8a63-78c0cc735339\") " pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:01 crc kubenswrapper[4652]: I1205 05:37:01.987595 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/b8cbd621-1925-4df5-8a63-78c0cc735339-openshift-service-ca\") pod \"perses-operator-5446b9c989-hgff6\" (UID: \"b8cbd621-1925-4df5-8a63-78c0cc735339\") " pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.040263 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr"] Dec 05 05:37:02 crc kubenswrapper[4652]: W1205 05:37:02.053000 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod720842c0_3317_47df_a8b0_99380921e48d.slice/crio-e925c00d8487c2cb125c467e3db9c5dd819927dfa2f1314094023be1e1b6e0ad WatchSource:0}: Error finding container e925c00d8487c2cb125c467e3db9c5dd819927dfa2f1314094023be1e1b6e0ad: Status 404 returned error can't find the container with id e925c00d8487c2cb125c467e3db9c5dd819927dfa2f1314094023be1e1b6e0ad Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.087630 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6"] Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.088288 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/b8cbd621-1925-4df5-8a63-78c0cc735339-openshift-service-ca\") pod \"perses-operator-5446b9c989-hgff6\" (UID: \"b8cbd621-1925-4df5-8a63-78c0cc735339\") " pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.088417 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq6mn\" (UniqueName: \"kubernetes.io/projected/b8cbd621-1925-4df5-8a63-78c0cc735339-kube-api-access-sq6mn\") pod \"perses-operator-5446b9c989-hgff6\" (UID: \"b8cbd621-1925-4df5-8a63-78c0cc735339\") " pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.089191 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/b8cbd621-1925-4df5-8a63-78c0cc735339-openshift-service-ca\") pod \"perses-operator-5446b9c989-hgff6\" (UID: \"b8cbd621-1925-4df5-8a63-78c0cc735339\") " pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:02 crc kubenswrapper[4652]: W1205 05:37:02.091758 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9eec5b9_7356_430d_b54a_53451f7eeeb6.slice/crio-57d918cc7210274a2eba791ee489bda882601525e43bbef1fa0f2d9c1e96dcf9 WatchSource:0}: Error finding container 
57d918cc7210274a2eba791ee489bda882601525e43bbef1fa0f2d9c1e96dcf9: Status 404 returned error can't find the container with id 57d918cc7210274a2eba791ee489bda882601525e43bbef1fa0f2d9c1e96dcf9 Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.105991 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq6mn\" (UniqueName: \"kubernetes.io/projected/b8cbd621-1925-4df5-8a63-78c0cc735339-kube-api-access-sq6mn\") pod \"perses-operator-5446b9c989-hgff6\" (UID: \"b8cbd621-1925-4df5-8a63-78c0cc735339\") " pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.143874 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7lgmr"] Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.148861 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:02 crc kubenswrapper[4652]: W1205 05:37:02.165409 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c7f2023_f868_4f76_a833_44988a07307d.slice/crio-b5961af8fe6065c846447531081d5a6aabae2deda65ca1fe11e28f4639756e67 WatchSource:0}: Error finding container b5961af8fe6065c846447531081d5a6aabae2deda65ca1fe11e28f4639756e67: Status 404 returned error can't find the container with id b5961af8fe6065c846447531081d5a6aabae2deda65ca1fe11e28f4639756e67 Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.207583 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt"] Dec 05 05:37:02 crc kubenswrapper[4652]: W1205 05:37:02.212699 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a6c3b19_b3a2_47a0_b6e6_0ac9b2a6a48a.slice/crio-956906d84736fdbb7686e3edc75e3f2ad84154390ce3093094eb05e0740fd581 WatchSource:0}: Error finding container 956906d84736fdbb7686e3edc75e3f2ad84154390ce3093094eb05e0740fd581: Status 404 returned error can't find the container with id 956906d84736fdbb7686e3edc75e3f2ad84154390ce3093094eb05e0740fd581 Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.324101 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hgff6"] Dec 05 05:37:02 crc kubenswrapper[4652]: W1205 05:37:02.324377 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8cbd621_1925_4df5_8a63_78c0cc735339.slice/crio-5854a12b17e0e209e77a4a80c07e078ed2cca0c1df128ca71bb1198dd33ddb8d WatchSource:0}: Error finding container 5854a12b17e0e209e77a4a80c07e078ed2cca0c1df128ca71bb1198dd33ddb8d: Status 404 returned error can't find the container with id 5854a12b17e0e209e77a4a80c07e078ed2cca0c1df128ca71bb1198dd33ddb8d Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.534366 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-hgff6" event={"ID":"b8cbd621-1925-4df5-8a63-78c0cc735339","Type":"ContainerStarted","Data":"5854a12b17e0e209e77a4a80c07e078ed2cca0c1df128ca71bb1198dd33ddb8d"} Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.535640 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" 
event={"ID":"d9eec5b9-7356-430d-b54a-53451f7eeeb6","Type":"ContainerStarted","Data":"57d918cc7210274a2eba791ee489bda882601525e43bbef1fa0f2d9c1e96dcf9"} Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.536835 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" event={"ID":"720842c0-3317-47df-a8b0-99380921e48d","Type":"ContainerStarted","Data":"e925c00d8487c2cb125c467e3db9c5dd819927dfa2f1314094023be1e1b6e0ad"} Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.537988 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt" event={"ID":"3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a","Type":"ContainerStarted","Data":"956906d84736fdbb7686e3edc75e3f2ad84154390ce3093094eb05e0740fd581"} Dec 05 05:37:02 crc kubenswrapper[4652]: I1205 05:37:02.538890 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" event={"ID":"4c7f2023-f868-4f76-a833-44988a07307d","Type":"ContainerStarted","Data":"b5961af8fe6065c846447531081d5a6aabae2deda65ca1fe11e28f4639756e67"} Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.606571 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" event={"ID":"720842c0-3317-47df-a8b0-99380921e48d","Type":"ContainerStarted","Data":"343b21acb3f7cad8d867810031c665799d098f1297b5d8b8b57ec9f5c2dd7289"} Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.608173 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt" event={"ID":"3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a","Type":"ContainerStarted","Data":"0dd0879caddb6a5ca6422be33248b1edcb313d27d8ae5f98913c67d2d313732b"} Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.609550 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" event={"ID":"4c7f2023-f868-4f76-a833-44988a07307d","Type":"ContainerStarted","Data":"e3e386f1b5971b5e52c9d9dafd95900f915096551b09a66e00360275b3f41db0"} Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.609897 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.611694 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.612321 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-hgff6" event={"ID":"b8cbd621-1925-4df5-8a63-78c0cc735339","Type":"ContainerStarted","Data":"d1c7b99bf2df46d31be6742b12dce145210915496ff3f5a13774a46eb4967185"} Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.612380 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.614058 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" event={"ID":"d9eec5b9-7356-430d-b54a-53451f7eeeb6","Type":"ContainerStarted","Data":"bbf1a7541914d282a77f262d355d2e858e81473906daf7220c75b6e397c5bb89"} Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.628154 4652 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr" podStartSLOduration=1.509593084 podStartE2EDuration="11.628143748s" podCreationTimestamp="2025-12-05 05:37:01 +0000 UTC" firstStartedPulling="2025-12-05 05:37:02.057583277 +0000 UTC m=+624.294313544" lastFinishedPulling="2025-12-05 05:37:12.176133941 +0000 UTC m=+634.412864208" observedRunningTime="2025-12-05 05:37:12.625530002 +0000 UTC m=+634.862260269" watchObservedRunningTime="2025-12-05 05:37:12.628143748 +0000 UTC m=+634.864874015" Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.639885 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6" podStartSLOduration=1.573453102 podStartE2EDuration="11.639875525s" podCreationTimestamp="2025-12-05 05:37:01 +0000 UTC" firstStartedPulling="2025-12-05 05:37:02.095642794 +0000 UTC m=+624.332373052" lastFinishedPulling="2025-12-05 05:37:12.162065208 +0000 UTC m=+634.398795475" observedRunningTime="2025-12-05 05:37:12.639871969 +0000 UTC m=+634.876602235" watchObservedRunningTime="2025-12-05 05:37:12.639875525 +0000 UTC m=+634.876605792" Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.658708 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-hgff6" podStartSLOduration=1.805486405 podStartE2EDuration="11.658692409s" podCreationTimestamp="2025-12-05 05:37:01 +0000 UTC" firstStartedPulling="2025-12-05 05:37:02.327046393 +0000 UTC m=+624.563776660" lastFinishedPulling="2025-12-05 05:37:12.180252397 +0000 UTC m=+634.416982664" observedRunningTime="2025-12-05 05:37:12.658443621 +0000 UTC m=+634.895173887" watchObservedRunningTime="2025-12-05 05:37:12.658692409 +0000 UTC m=+634.895422676" Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.676933 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-7lgmr" podStartSLOduration=1.6275789600000001 podStartE2EDuration="11.676913452s" podCreationTimestamp="2025-12-05 05:37:01 +0000 UTC" firstStartedPulling="2025-12-05 05:37:02.171123791 +0000 UTC m=+624.407854058" lastFinishedPulling="2025-12-05 05:37:12.220458293 +0000 UTC m=+634.457188550" observedRunningTime="2025-12-05 05:37:12.674618285 +0000 UTC m=+634.911348552" watchObservedRunningTime="2025-12-05 05:37:12.676913452 +0000 UTC m=+634.913643719" Dec 05 05:37:12 crc kubenswrapper[4652]: I1205 05:37:12.700931 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-rnzbt" podStartSLOduration=1.721039335 podStartE2EDuration="11.70091599s" podCreationTimestamp="2025-12-05 05:37:01 +0000 UTC" firstStartedPulling="2025-12-05 05:37:02.215054632 +0000 UTC m=+624.451784899" lastFinishedPulling="2025-12-05 05:37:12.194931287 +0000 UTC m=+634.431661554" observedRunningTime="2025-12-05 05:37:12.697308193 +0000 UTC m=+634.934038460" watchObservedRunningTime="2025-12-05 05:37:12.70091599 +0000 UTC m=+634.937646256" Dec 05 05:37:22 crc kubenswrapper[4652]: I1205 05:37:22.151624 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-hgff6" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.754362 4652 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4"] Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.755791 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.757162 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.762312 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4"] Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.866310 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95sp7\" (UniqueName: \"kubernetes.io/projected/e5af0d49-ade4-4624-9dab-984dab1bfcca-kube-api-access-95sp7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.866373 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.866432 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.967473 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95sp7\" (UniqueName: \"kubernetes.io/projected/e5af0d49-ade4-4624-9dab-984dab1bfcca-kube-api-access-95sp7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.967589 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.967635 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " 
pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.968086 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.968140 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:36 crc kubenswrapper[4652]: I1205 05:37:36.986973 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95sp7\" (UniqueName: \"kubernetes.io/projected/e5af0d49-ade4-4624-9dab-984dab1bfcca-kube-api-access-95sp7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:37 crc kubenswrapper[4652]: I1205 05:37:37.067956 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:37 crc kubenswrapper[4652]: I1205 05:37:37.291757 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4"] Dec 05 05:37:37 crc kubenswrapper[4652]: I1205 05:37:37.747968 4652 generic.go:334] "Generic (PLEG): container finished" podID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerID="f2f1cba7d39c6c3a317bfa299483b394375b12e479bb1be4f1a65458e2729e03" exitCode=0 Dec 05 05:37:37 crc kubenswrapper[4652]: I1205 05:37:37.748050 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" event={"ID":"e5af0d49-ade4-4624-9dab-984dab1bfcca","Type":"ContainerDied","Data":"f2f1cba7d39c6c3a317bfa299483b394375b12e479bb1be4f1a65458e2729e03"} Dec 05 05:37:37 crc kubenswrapper[4652]: I1205 05:37:37.748120 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" event={"ID":"e5af0d49-ade4-4624-9dab-984dab1bfcca","Type":"ContainerStarted","Data":"553faf33c21eac010a7be88c9a6d6db754772b7090c34e5c820ef9167f3f9af6"} Dec 05 05:37:40 crc kubenswrapper[4652]: I1205 05:37:40.765903 4652 generic.go:334] "Generic (PLEG): container finished" podID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerID="6b9e901822baab832c97ef347f3478eac6ab7f8bf8da5d74b26f5530e0eed464" exitCode=0 Dec 05 05:37:40 crc kubenswrapper[4652]: I1205 05:37:40.765933 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" event={"ID":"e5af0d49-ade4-4624-9dab-984dab1bfcca","Type":"ContainerDied","Data":"6b9e901822baab832c97ef347f3478eac6ab7f8bf8da5d74b26f5530e0eed464"} Dec 05 05:37:41 crc kubenswrapper[4652]: I1205 
05:37:41.773823 4652 generic.go:334] "Generic (PLEG): container finished" podID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerID="d604fb2f86c15a3698a2d255e97116f2cb6bbf19d33812ca3274159c65dcd4f6" exitCode=0 Dec 05 05:37:41 crc kubenswrapper[4652]: I1205 05:37:41.773871 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" event={"ID":"e5af0d49-ade4-4624-9dab-984dab1bfcca","Type":"ContainerDied","Data":"d604fb2f86c15a3698a2d255e97116f2cb6bbf19d33812ca3274159c65dcd4f6"} Dec 05 05:37:42 crc kubenswrapper[4652]: I1205 05:37:42.958678 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.147912 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-util\") pod \"e5af0d49-ade4-4624-9dab-984dab1bfcca\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.147965 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-bundle\") pod \"e5af0d49-ade4-4624-9dab-984dab1bfcca\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.148036 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95sp7\" (UniqueName: \"kubernetes.io/projected/e5af0d49-ade4-4624-9dab-984dab1bfcca-kube-api-access-95sp7\") pod \"e5af0d49-ade4-4624-9dab-984dab1bfcca\" (UID: \"e5af0d49-ade4-4624-9dab-984dab1bfcca\") " Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.148609 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-bundle" (OuterVolumeSpecName: "bundle") pod "e5af0d49-ade4-4624-9dab-984dab1bfcca" (UID: "e5af0d49-ade4-4624-9dab-984dab1bfcca"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.152639 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5af0d49-ade4-4624-9dab-984dab1bfcca-kube-api-access-95sp7" (OuterVolumeSpecName: "kube-api-access-95sp7") pod "e5af0d49-ade4-4624-9dab-984dab1bfcca" (UID: "e5af0d49-ade4-4624-9dab-984dab1bfcca"). InnerVolumeSpecName "kube-api-access-95sp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.155364 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-util" (OuterVolumeSpecName: "util") pod "e5af0d49-ade4-4624-9dab-984dab1bfcca" (UID: "e5af0d49-ade4-4624-9dab-984dab1bfcca"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.249743 4652 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-util\") on node \"crc\" DevicePath \"\"" Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.249777 4652 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5af0d49-ade4-4624-9dab-984dab1bfcca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.249788 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95sp7\" (UniqueName: \"kubernetes.io/projected/e5af0d49-ade4-4624-9dab-984dab1bfcca-kube-api-access-95sp7\") on node \"crc\" DevicePath \"\"" Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.785668 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" event={"ID":"e5af0d49-ade4-4624-9dab-984dab1bfcca","Type":"ContainerDied","Data":"553faf33c21eac010a7be88c9a6d6db754772b7090c34e5c820ef9167f3f9af6"} Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.785710 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="553faf33c21eac010a7be88c9a6d6db754772b7090c34e5c820ef9167f3f9af6" Dec 05 05:37:43 crc kubenswrapper[4652]: I1205 05:37:43.785907 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.397273 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4"] Dec 05 05:37:48 crc kubenswrapper[4652]: E1205 05:37:48.397503 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerName="extract" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.397517 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerName="extract" Dec 05 05:37:48 crc kubenswrapper[4652]: E1205 05:37:48.397539 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerName="util" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.397545 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerName="util" Dec 05 05:37:48 crc kubenswrapper[4652]: E1205 05:37:48.397579 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerName="pull" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.397586 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerName="pull" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.397693 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5af0d49-ade4-4624-9dab-984dab1bfcca" containerName="extract" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.398113 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.400867 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-f9cvj" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.400998 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.401045 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.407666 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4"] Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.412338 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9xqr\" (UniqueName: \"kubernetes.io/projected/44a02e83-b669-4ebd-824d-c392001131a0-kube-api-access-l9xqr\") pod \"nmstate-operator-5b5b58f5c8-6tkg4\" (UID: \"44a02e83-b669-4ebd-824d-c392001131a0\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.513599 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9xqr\" (UniqueName: \"kubernetes.io/projected/44a02e83-b669-4ebd-824d-c392001131a0-kube-api-access-l9xqr\") pod \"nmstate-operator-5b5b58f5c8-6tkg4\" (UID: \"44a02e83-b669-4ebd-824d-c392001131a0\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.531405 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9xqr\" (UniqueName: \"kubernetes.io/projected/44a02e83-b669-4ebd-824d-c392001131a0-kube-api-access-l9xqr\") pod \"nmstate-operator-5b5b58f5c8-6tkg4\" (UID: \"44a02e83-b669-4ebd-824d-c392001131a0\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4" Dec 05 05:37:48 crc kubenswrapper[4652]: I1205 05:37:48.715512 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4" Dec 05 05:37:49 crc kubenswrapper[4652]: I1205 05:37:49.106511 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4"] Dec 05 05:37:49 crc kubenswrapper[4652]: I1205 05:37:49.825349 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4" event={"ID":"44a02e83-b669-4ebd-824d-c392001131a0","Type":"ContainerStarted","Data":"3847821d62f90d224c55941d2e6f76efdd0dfa9076f9684d455ad52cd3a49cca"} Dec 05 05:37:51 crc kubenswrapper[4652]: I1205 05:37:51.842396 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4" event={"ID":"44a02e83-b669-4ebd-824d-c392001131a0","Type":"ContainerStarted","Data":"252f607bf3e3530ef102379bf3f1ea8057fc5e865e27e14a59b1e7a22d02c5ec"} Dec 05 05:37:51 crc kubenswrapper[4652]: I1205 05:37:51.857845 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6tkg4" podStartSLOduration=1.893997001 podStartE2EDuration="3.857828609s" podCreationTimestamp="2025-12-05 05:37:48 +0000 UTC" firstStartedPulling="2025-12-05 05:37:49.114783955 +0000 UTC m=+671.351514222" lastFinishedPulling="2025-12-05 05:37:51.078615563 +0000 UTC m=+673.315345830" observedRunningTime="2025-12-05 05:37:51.854392756 +0000 UTC m=+674.091123024" watchObservedRunningTime="2025-12-05 05:37:51.857828609 +0000 UTC m=+674.094558875" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.405522 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.407613 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.412931 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-9f9v2" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.416469 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.417921 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.419419 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.427462 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.433265 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-m7ckb"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.434362 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.440884 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.524580 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.525311 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.526773 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-zv8cw" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.526932 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.527083 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.533291 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.542759 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/51d7fcc3-f6cf-48b7-a948-471a58961770-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-wlk99\" (UID: \"51d7fcc3-f6cf-48b7-a948-471a58961770\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.542817 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-nmstate-lock\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.542841 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-dbus-socket\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.542880 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndwt4\" (UniqueName: \"kubernetes.io/projected/51d7fcc3-f6cf-48b7-a948-471a58961770-kube-api-access-ndwt4\") pod \"nmstate-webhook-5f6d4c5ccb-wlk99\" (UID: \"51d7fcc3-f6cf-48b7-a948-471a58961770\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.542899 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x44jd\" (UniqueName: \"kubernetes.io/projected/fbc4582c-3b3c-4a43-b80b-d944ec9236db-kube-api-access-x44jd\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.542922 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-872g5\" (UniqueName: \"kubernetes.io/projected/365195ea-8d24-4d70-950d-73fd7bb88e0e-kube-api-access-872g5\") pod \"nmstate-metrics-7f946cbc9-8fss9\" (UID: \"365195ea-8d24-4d70-950d-73fd7bb88e0e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.542948 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-ovs-socket\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644002 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndwt4\" (UniqueName: \"kubernetes.io/projected/51d7fcc3-f6cf-48b7-a948-471a58961770-kube-api-access-ndwt4\") pod \"nmstate-webhook-5f6d4c5ccb-wlk99\" (UID: \"51d7fcc3-f6cf-48b7-a948-471a58961770\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644041 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x44jd\" (UniqueName: \"kubernetes.io/projected/fbc4582c-3b3c-4a43-b80b-d944ec9236db-kube-api-access-x44jd\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644074 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-872g5\" (UniqueName: \"kubernetes.io/projected/365195ea-8d24-4d70-950d-73fd7bb88e0e-kube-api-access-872g5\") pod \"nmstate-metrics-7f946cbc9-8fss9\" (UID: \"365195ea-8d24-4d70-950d-73fd7bb88e0e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644110 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-ovs-socket\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644129 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/51d7fcc3-f6cf-48b7-a948-471a58961770-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-wlk99\" (UID: \"51d7fcc3-f6cf-48b7-a948-471a58961770\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644165 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fs8z\" (UniqueName: \"kubernetes.io/projected/ecb29914-d051-4ef2-ae2a-3152e9523d7e-kube-api-access-8fs8z\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644197 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-nmstate-lock\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 
05:37:57.644215 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ecb29914-d051-4ef2-ae2a-3152e9523d7e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644239 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-dbus-socket\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644255 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ecb29914-d051-4ef2-ae2a-3152e9523d7e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644591 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-nmstate-lock\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.644660 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-ovs-socket\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.645044 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/fbc4582c-3b3c-4a43-b80b-d944ec9236db-dbus-socket\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.655754 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/51d7fcc3-f6cf-48b7-a948-471a58961770-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-wlk99\" (UID: \"51d7fcc3-f6cf-48b7-a948-471a58961770\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.663186 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndwt4\" (UniqueName: \"kubernetes.io/projected/51d7fcc3-f6cf-48b7-a948-471a58961770-kube-api-access-ndwt4\") pod \"nmstate-webhook-5f6d4c5ccb-wlk99\" (UID: \"51d7fcc3-f6cf-48b7-a948-471a58961770\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.663542 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-872g5\" (UniqueName: \"kubernetes.io/projected/365195ea-8d24-4d70-950d-73fd7bb88e0e-kube-api-access-872g5\") pod \"nmstate-metrics-7f946cbc9-8fss9\" (UID: \"365195ea-8d24-4d70-950d-73fd7bb88e0e\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" Dec 05 05:37:57 crc 
kubenswrapper[4652]: I1205 05:37:57.668117 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x44jd\" (UniqueName: \"kubernetes.io/projected/fbc4582c-3b3c-4a43-b80b-d944ec9236db-kube-api-access-x44jd\") pod \"nmstate-handler-m7ckb\" (UID: \"fbc4582c-3b3c-4a43-b80b-d944ec9236db\") " pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.721796 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-b7757869c-w5tlc"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.722583 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.725622 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.728923 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-b7757869c-w5tlc"] Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.734843 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.745921 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fs8z\" (UniqueName: \"kubernetes.io/projected/ecb29914-d051-4ef2-ae2a-3152e9523d7e-kube-api-access-8fs8z\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.746298 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ecb29914-d051-4ef2-ae2a-3152e9523d7e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.746325 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ecb29914-d051-4ef2-ae2a-3152e9523d7e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: E1205 05:37:57.746469 4652 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 05 05:37:57 crc kubenswrapper[4652]: E1205 05:37:57.746530 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ecb29914-d051-4ef2-ae2a-3152e9523d7e-plugin-serving-cert podName:ecb29914-d051-4ef2-ae2a-3152e9523d7e nodeName:}" failed. No retries permitted until 2025-12-05 05:37:58.246509903 +0000 UTC m=+680.483240170 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/ecb29914-d051-4ef2-ae2a-3152e9523d7e-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-tq6cx" (UID: "ecb29914-d051-4ef2-ae2a-3152e9523d7e") : secret "plugin-serving-cert" not found Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.747512 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/ecb29914-d051-4ef2-ae2a-3152e9523d7e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.749786 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.768364 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fs8z\" (UniqueName: \"kubernetes.io/projected/ecb29914-d051-4ef2-ae2a-3152e9523d7e-kube-api-access-8fs8z\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.850962 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96q4q\" (UniqueName: \"kubernetes.io/projected/75cf5de9-2d73-46d3-a619-6bb670e14bda-kube-api-access-96q4q\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.851033 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-oauth-serving-cert\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.852317 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-service-ca\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.852461 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-trusted-ca-bundle\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.852520 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-config\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.852580 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-serving-cert\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.852638 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-oauth-config\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.878617 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-m7ckb" event={"ID":"fbc4582c-3b3c-4a43-b80b-d944ec9236db","Type":"ContainerStarted","Data":"acaed868551bdabb5ddc7656e8f250c29b0d009c5e8d54dfb44896833a3327ff"} Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.933191 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99"] Dec 05 05:37:57 crc kubenswrapper[4652]: W1205 05:37:57.937849 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51d7fcc3_f6cf_48b7_a948_471a58961770.slice/crio-f162e243fb25f23b12ead6e5664676290f01d5f2db3f49e6be4fc451834fceb9 WatchSource:0}: Error finding container f162e243fb25f23b12ead6e5664676290f01d5f2db3f49e6be4fc451834fceb9: Status 404 returned error can't find the container with id f162e243fb25f23b12ead6e5664676290f01d5f2db3f49e6be4fc451834fceb9 Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.953715 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96q4q\" (UniqueName: \"kubernetes.io/projected/75cf5de9-2d73-46d3-a619-6bb670e14bda-kube-api-access-96q4q\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.953766 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-oauth-serving-cert\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.953813 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-service-ca\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.953855 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-trusted-ca-bundle\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.953876 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-config\") pod \"console-b7757869c-w5tlc\" (UID: 
\"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.953896 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-serving-cert\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.953926 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-oauth-config\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.954801 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-oauth-serving-cert\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.954950 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-trusted-ca-bundle\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.955380 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-config\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.955500 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/75cf5de9-2d73-46d3-a619-6bb670e14bda-service-ca\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.960964 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-serving-cert\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.961397 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/75cf5de9-2d73-46d3-a619-6bb670e14bda-console-oauth-config\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.967488 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96q4q\" (UniqueName: \"kubernetes.io/projected/75cf5de9-2d73-46d3-a619-6bb670e14bda-kube-api-access-96q4q\") pod \"console-b7757869c-w5tlc\" (UID: \"75cf5de9-2d73-46d3-a619-6bb670e14bda\") " 
pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:57 crc kubenswrapper[4652]: I1205 05:37:57.987774 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9"] Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.083055 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.232824 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-b7757869c-w5tlc"] Dec 05 05:37:58 crc kubenswrapper[4652]: W1205 05:37:58.236009 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75cf5de9_2d73_46d3_a619_6bb670e14bda.slice/crio-c86fd89c1c952feaea0d97550463a7766bce27cee53f908a575a4e249ccd69ca WatchSource:0}: Error finding container c86fd89c1c952feaea0d97550463a7766bce27cee53f908a575a4e249ccd69ca: Status 404 returned error can't find the container with id c86fd89c1c952feaea0d97550463a7766bce27cee53f908a575a4e249ccd69ca Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.259286 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ecb29914-d051-4ef2-ae2a-3152e9523d7e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.263269 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/ecb29914-d051-4ef2-ae2a-3152e9523d7e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-tq6cx\" (UID: \"ecb29914-d051-4ef2-ae2a-3152e9523d7e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.435867 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.686839 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx"] Dec 05 05:37:58 crc kubenswrapper[4652]: W1205 05:37:58.700483 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podecb29914_d051_4ef2_ae2a_3152e9523d7e.slice/crio-1aa9f6c74cf5af31411558f6e1c4b73717318e527e20fb9ebfca5caf87852aa7 WatchSource:0}: Error finding container 1aa9f6c74cf5af31411558f6e1c4b73717318e527e20fb9ebfca5caf87852aa7: Status 404 returned error can't find the container with id 1aa9f6c74cf5af31411558f6e1c4b73717318e527e20fb9ebfca5caf87852aa7 Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.885370 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-b7757869c-w5tlc" event={"ID":"75cf5de9-2d73-46d3-a619-6bb670e14bda","Type":"ContainerStarted","Data":"a39830a163dd2a095ea5be82a94e120b8010bc97eb8ecc8cde24d81f670472d9"} Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.885432 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-b7757869c-w5tlc" event={"ID":"75cf5de9-2d73-46d3-a619-6bb670e14bda","Type":"ContainerStarted","Data":"c86fd89c1c952feaea0d97550463a7766bce27cee53f908a575a4e249ccd69ca"} Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.886449 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" event={"ID":"ecb29914-d051-4ef2-ae2a-3152e9523d7e","Type":"ContainerStarted","Data":"1aa9f6c74cf5af31411558f6e1c4b73717318e527e20fb9ebfca5caf87852aa7"} Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.887370 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" event={"ID":"51d7fcc3-f6cf-48b7-a948-471a58961770","Type":"ContainerStarted","Data":"f162e243fb25f23b12ead6e5664676290f01d5f2db3f49e6be4fc451834fceb9"} Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.888372 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" event={"ID":"365195ea-8d24-4d70-950d-73fd7bb88e0e","Type":"ContainerStarted","Data":"7d4c471362351d2ce891744c4f7145cbb4ac73820a355aef3b169ef9deec3af0"} Dec 05 05:37:58 crc kubenswrapper[4652]: I1205 05:37:58.900629 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-b7757869c-w5tlc" podStartSLOduration=1.9006170180000002 podStartE2EDuration="1.900617018s" podCreationTimestamp="2025-12-05 05:37:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:37:58.898978877 +0000 UTC m=+681.135709144" watchObservedRunningTime="2025-12-05 05:37:58.900617018 +0000 UTC m=+681.137347285" Dec 05 05:38:00 crc kubenswrapper[4652]: I1205 05:38:00.902879 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-m7ckb" event={"ID":"fbc4582c-3b3c-4a43-b80b-d944ec9236db","Type":"ContainerStarted","Data":"0413196dc5a8120ff729174928b4615335769df83976b18cf14a242c48c5f76f"} Dec 05 05:38:00 crc kubenswrapper[4652]: I1205 05:38:00.903093 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:38:00 crc kubenswrapper[4652]: I1205 
05:38:00.904911 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" event={"ID":"51d7fcc3-f6cf-48b7-a948-471a58961770","Type":"ContainerStarted","Data":"1d55a5831e0ba565d8d93422774bc6a3b801e6bda3bfdaca8711b7523893ef92"} Dec 05 05:38:00 crc kubenswrapper[4652]: I1205 05:38:00.904994 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:38:00 crc kubenswrapper[4652]: I1205 05:38:00.906774 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" event={"ID":"365195ea-8d24-4d70-950d-73fd7bb88e0e","Type":"ContainerStarted","Data":"39137e3dba3d8ffca54b8c25f85a193c97c7c1e2e462adf196956fb3ad0a8408"} Dec 05 05:38:00 crc kubenswrapper[4652]: I1205 05:38:00.916315 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-m7ckb" podStartSLOduration=1.374214234 podStartE2EDuration="3.916305514s" podCreationTimestamp="2025-12-05 05:37:57 +0000 UTC" firstStartedPulling="2025-12-05 05:37:57.788274017 +0000 UTC m=+680.025004284" lastFinishedPulling="2025-12-05 05:38:00.330365297 +0000 UTC m=+682.567095564" observedRunningTime="2025-12-05 05:38:00.91494734 +0000 UTC m=+683.151677606" watchObservedRunningTime="2025-12-05 05:38:00.916305514 +0000 UTC m=+683.153035781" Dec 05 05:38:00 crc kubenswrapper[4652]: I1205 05:38:00.927133 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" podStartSLOduration=1.535747282 podStartE2EDuration="3.927115697s" podCreationTimestamp="2025-12-05 05:37:57 +0000 UTC" firstStartedPulling="2025-12-05 05:37:57.941775238 +0000 UTC m=+680.178505505" lastFinishedPulling="2025-12-05 05:38:00.333143652 +0000 UTC m=+682.569873920" observedRunningTime="2025-12-05 05:38:00.925988337 +0000 UTC m=+683.162718624" watchObservedRunningTime="2025-12-05 05:38:00.927115697 +0000 UTC m=+683.163845964" Dec 05 05:38:01 crc kubenswrapper[4652]: I1205 05:38:01.920936 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" event={"ID":"ecb29914-d051-4ef2-ae2a-3152e9523d7e","Type":"ContainerStarted","Data":"da43bef1c21a86029eb0ead6d06bf06bb29f15321ca025b82a8feebcbff22309"} Dec 05 05:38:01 crc kubenswrapper[4652]: I1205 05:38:01.936574 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-tq6cx" podStartSLOduration=2.275822643 podStartE2EDuration="4.936535339s" podCreationTimestamp="2025-12-05 05:37:57 +0000 UTC" firstStartedPulling="2025-12-05 05:37:58.700163413 +0000 UTC m=+680.936893680" lastFinishedPulling="2025-12-05 05:38:01.360876109 +0000 UTC m=+683.597606376" observedRunningTime="2025-12-05 05:38:01.935236696 +0000 UTC m=+684.171966964" watchObservedRunningTime="2025-12-05 05:38:01.936535339 +0000 UTC m=+684.173265606" Dec 05 05:38:02 crc kubenswrapper[4652]: I1205 05:38:02.930639 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" event={"ID":"365195ea-8d24-4d70-950d-73fd7bb88e0e","Type":"ContainerStarted","Data":"393541cc3d819ebe2f7117a68bff17c62638bb0ff43cf9c03d9016654572e14e"} Dec 05 05:38:02 crc kubenswrapper[4652]: I1205 05:38:02.948076 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-8fss9" 
podStartSLOduration=1.351146465 podStartE2EDuration="5.948055721s" podCreationTimestamp="2025-12-05 05:37:57 +0000 UTC" firstStartedPulling="2025-12-05 05:37:57.990867175 +0000 UTC m=+680.227597432" lastFinishedPulling="2025-12-05 05:38:02.587776421 +0000 UTC m=+684.824506688" observedRunningTime="2025-12-05 05:38:02.946996529 +0000 UTC m=+685.183726796" watchObservedRunningTime="2025-12-05 05:38:02.948055721 +0000 UTC m=+685.184785988" Dec 05 05:38:04 crc kubenswrapper[4652]: I1205 05:38:04.150073 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:38:04 crc kubenswrapper[4652]: I1205 05:38:04.150131 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:38:07 crc kubenswrapper[4652]: I1205 05:38:07.773461 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-m7ckb" Dec 05 05:38:08 crc kubenswrapper[4652]: I1205 05:38:08.083271 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:38:08 crc kubenswrapper[4652]: I1205 05:38:08.083342 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:38:08 crc kubenswrapper[4652]: I1205 05:38:08.088756 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:38:08 crc kubenswrapper[4652]: I1205 05:38:08.969921 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-b7757869c-w5tlc" Dec 05 05:38:09 crc kubenswrapper[4652]: I1205 05:38:09.012238 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-pjnkl"] Dec 05 05:38:17 crc kubenswrapper[4652]: I1205 05:38:17.739390 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-wlk99" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.676324 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz"] Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.677953 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.679777 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.685122 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz"] Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.838863 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr5cn\" (UniqueName: \"kubernetes.io/projected/50613739-37b4-41ab-b9b1-73afc91d1fad-kube-api-access-gr5cn\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.839053 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.839087 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.940863 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.940906 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.940948 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr5cn\" (UniqueName: \"kubernetes.io/projected/50613739-37b4-41ab-b9b1-73afc91d1fad-kube-api-access-gr5cn\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.941486 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.941628 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.960168 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr5cn\" (UniqueName: \"kubernetes.io/projected/50613739-37b4-41ab-b9b1-73afc91d1fad-kube-api-access-gr5cn\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:30 crc kubenswrapper[4652]: I1205 05:38:30.991532 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:31 crc kubenswrapper[4652]: I1205 05:38:31.363619 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz"] Dec 05 05:38:32 crc kubenswrapper[4652]: I1205 05:38:32.094130 4652 generic.go:334] "Generic (PLEG): container finished" podID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerID="370eb000acfba871b55a4b5a99c5c09dccfae035e9afdfe5c1e2d66493b02edc" exitCode=0 Dec 05 05:38:32 crc kubenswrapper[4652]: I1205 05:38:32.094172 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" event={"ID":"50613739-37b4-41ab-b9b1-73afc91d1fad","Type":"ContainerDied","Data":"370eb000acfba871b55a4b5a99c5c09dccfae035e9afdfe5c1e2d66493b02edc"} Dec 05 05:38:32 crc kubenswrapper[4652]: I1205 05:38:32.094196 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" event={"ID":"50613739-37b4-41ab-b9b1-73afc91d1fad","Type":"ContainerStarted","Data":"d6b051be4f19ff0d0c6bcd24688cccd0ad1a5ce12836c56beb3295a55e147ca6"} Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.045983 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-pjnkl" podUID="f4bd4318-0406-40e2-8b50-b79c312bb10a" containerName="console" containerID="cri-o://f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9" gracePeriod=15 Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.104141 4652 generic.go:334] "Generic (PLEG): container finished" podID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerID="67bf09bcb20d77cbb0baa15518da529d5bf7281e42e14007c16de4f387eb1ef4" exitCode=0 Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.104181 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" 
event={"ID":"50613739-37b4-41ab-b9b1-73afc91d1fad","Type":"ContainerDied","Data":"67bf09bcb20d77cbb0baa15518da529d5bf7281e42e14007c16de4f387eb1ef4"} Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.150399 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.150460 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.368071 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-pjnkl_f4bd4318-0406-40e2-8b50-b79c312bb10a/console/0.log" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.368143 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.383804 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-service-ca\") pod \"f4bd4318-0406-40e2-8b50-b79c312bb10a\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.383855 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-config\") pod \"f4bd4318-0406-40e2-8b50-b79c312bb10a\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.383902 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-oauth-config\") pod \"f4bd4318-0406-40e2-8b50-b79c312bb10a\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.383922 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-oauth-serving-cert\") pod \"f4bd4318-0406-40e2-8b50-b79c312bb10a\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.383936 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-serving-cert\") pod \"f4bd4318-0406-40e2-8b50-b79c312bb10a\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.383969 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-trusted-ca-bundle\") pod \"f4bd4318-0406-40e2-8b50-b79c312bb10a\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.383989 4652 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-kk2tq\" (UniqueName: \"kubernetes.io/projected/f4bd4318-0406-40e2-8b50-b79c312bb10a-kube-api-access-kk2tq\") pod \"f4bd4318-0406-40e2-8b50-b79c312bb10a\" (UID: \"f4bd4318-0406-40e2-8b50-b79c312bb10a\") " Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.384798 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-service-ca" (OuterVolumeSpecName: "service-ca") pod "f4bd4318-0406-40e2-8b50-b79c312bb10a" (UID: "f4bd4318-0406-40e2-8b50-b79c312bb10a"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.385176 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-config" (OuterVolumeSpecName: "console-config") pod "f4bd4318-0406-40e2-8b50-b79c312bb10a" (UID: "f4bd4318-0406-40e2-8b50-b79c312bb10a"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.385503 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "f4bd4318-0406-40e2-8b50-b79c312bb10a" (UID: "f4bd4318-0406-40e2-8b50-b79c312bb10a"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.385855 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "f4bd4318-0406-40e2-8b50-b79c312bb10a" (UID: "f4bd4318-0406-40e2-8b50-b79c312bb10a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.389458 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "f4bd4318-0406-40e2-8b50-b79c312bb10a" (UID: "f4bd4318-0406-40e2-8b50-b79c312bb10a"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.389704 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4bd4318-0406-40e2-8b50-b79c312bb10a-kube-api-access-kk2tq" (OuterVolumeSpecName: "kube-api-access-kk2tq") pod "f4bd4318-0406-40e2-8b50-b79c312bb10a" (UID: "f4bd4318-0406-40e2-8b50-b79c312bb10a"). InnerVolumeSpecName "kube-api-access-kk2tq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.389945 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "f4bd4318-0406-40e2-8b50-b79c312bb10a" (UID: "f4bd4318-0406-40e2-8b50-b79c312bb10a"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.485218 4652 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.485242 4652 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.485252 4652 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4bd4318-0406-40e2-8b50-b79c312bb10a-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.485276 4652 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.485290 4652 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.485298 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk2tq\" (UniqueName: \"kubernetes.io/projected/f4bd4318-0406-40e2-8b50-b79c312bb10a-kube-api-access-kk2tq\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:34 crc kubenswrapper[4652]: I1205 05:38:34.485310 4652 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f4bd4318-0406-40e2-8b50-b79c312bb10a-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.110672 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-pjnkl_f4bd4318-0406-40e2-8b50-b79c312bb10a/console/0.log" Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.110918 4652 generic.go:334] "Generic (PLEG): container finished" podID="f4bd4318-0406-40e2-8b50-b79c312bb10a" containerID="f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9" exitCode=2 Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.110993 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-pjnkl" event={"ID":"f4bd4318-0406-40e2-8b50-b79c312bb10a","Type":"ContainerDied","Data":"f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9"} Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.111014 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-pjnkl" Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.111037 4652 scope.go:117] "RemoveContainer" containerID="f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9" Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.111024 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-pjnkl" event={"ID":"f4bd4318-0406-40e2-8b50-b79c312bb10a","Type":"ContainerDied","Data":"598e5af9201d9ee89551dc66cae596f3368ffab8ca016b9d0d36810283a61f8a"} Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.113238 4652 generic.go:334] "Generic (PLEG): container finished" podID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerID="37482af46005d8c995835cc6d1e7e1cc3e1e0aafaf3a1811e6f5949baa51829a" exitCode=0 Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.113265 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" event={"ID":"50613739-37b4-41ab-b9b1-73afc91d1fad","Type":"ContainerDied","Data":"37482af46005d8c995835cc6d1e7e1cc3e1e0aafaf3a1811e6f5949baa51829a"} Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.129109 4652 scope.go:117] "RemoveContainer" containerID="f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9" Dec 05 05:38:35 crc kubenswrapper[4652]: E1205 05:38:35.129439 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9\": container with ID starting with f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9 not found: ID does not exist" containerID="f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9" Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.129486 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9"} err="failed to get container status \"f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9\": rpc error: code = NotFound desc = could not find container \"f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9\": container with ID starting with f88fb0d9ce76264883e96d5546e3c3d80608abef4befdd25db9adfe00ec1e1e9 not found: ID does not exist" Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.140629 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-pjnkl"] Dec 05 05:38:35 crc kubenswrapper[4652]: I1205 05:38:35.144487 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-pjnkl"] Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.130115 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4bd4318-0406-40e2-8b50-b79c312bb10a" path="/var/lib/kubelet/pods/f4bd4318-0406-40e2-8b50-b79c312bb10a/volumes" Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.285151 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.304847 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-bundle\") pod \"50613739-37b4-41ab-b9b1-73afc91d1fad\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.304889 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-util\") pod \"50613739-37b4-41ab-b9b1-73afc91d1fad\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.304950 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gr5cn\" (UniqueName: \"kubernetes.io/projected/50613739-37b4-41ab-b9b1-73afc91d1fad-kube-api-access-gr5cn\") pod \"50613739-37b4-41ab-b9b1-73afc91d1fad\" (UID: \"50613739-37b4-41ab-b9b1-73afc91d1fad\") " Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.305759 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-bundle" (OuterVolumeSpecName: "bundle") pod "50613739-37b4-41ab-b9b1-73afc91d1fad" (UID: "50613739-37b4-41ab-b9b1-73afc91d1fad"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.308998 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50613739-37b4-41ab-b9b1-73afc91d1fad-kube-api-access-gr5cn" (OuterVolumeSpecName: "kube-api-access-gr5cn") pod "50613739-37b4-41ab-b9b1-73afc91d1fad" (UID: "50613739-37b4-41ab-b9b1-73afc91d1fad"). InnerVolumeSpecName "kube-api-access-gr5cn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.315707 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-util" (OuterVolumeSpecName: "util") pod "50613739-37b4-41ab-b9b1-73afc91d1fad" (UID: "50613739-37b4-41ab-b9b1-73afc91d1fad"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.405957 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gr5cn\" (UniqueName: \"kubernetes.io/projected/50613739-37b4-41ab-b9b1-73afc91d1fad-kube-api-access-gr5cn\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.405991 4652 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:36 crc kubenswrapper[4652]: I1205 05:38:36.406001 4652 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50613739-37b4-41ab-b9b1-73afc91d1fad-util\") on node \"crc\" DevicePath \"\"" Dec 05 05:38:37 crc kubenswrapper[4652]: I1205 05:38:37.124863 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" event={"ID":"50613739-37b4-41ab-b9b1-73afc91d1fad","Type":"ContainerDied","Data":"d6b051be4f19ff0d0c6bcd24688cccd0ad1a5ce12836c56beb3295a55e147ca6"} Dec 05 05:38:37 crc kubenswrapper[4652]: I1205 05:38:37.124897 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6b051be4f19ff0d0c6bcd24688cccd0ad1a5ce12836c56beb3295a55e147ca6" Dec 05 05:38:37 crc kubenswrapper[4652]: I1205 05:38:37.124910 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz" Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.955387 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"] Dec 05 05:38:47 crc kubenswrapper[4652]: E1205 05:38:47.956861 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerName="extract" Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.956938 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerName="extract" Dec 05 05:38:47 crc kubenswrapper[4652]: E1205 05:38:47.957003 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerName="pull" Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.957048 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerName="pull" Dec 05 05:38:47 crc kubenswrapper[4652]: E1205 05:38:47.957094 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4bd4318-0406-40e2-8b50-b79c312bb10a" containerName="console" Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.957136 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4bd4318-0406-40e2-8b50-b79c312bb10a" containerName="console" Dec 05 05:38:47 crc kubenswrapper[4652]: E1205 05:38:47.957196 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerName="util" Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.957242 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="50613739-37b4-41ab-b9b1-73afc91d1fad" containerName="util" Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.957394 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4bd4318-0406-40e2-8b50-b79c312bb10a" containerName="console" Dec 
Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.957966 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"
Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.960869 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.961271 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.963454 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.963889 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.965191 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-dw24t"
Dec 05 05:38:47 crc kubenswrapper[4652]: I1205 05:38:47.978624 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"]
Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.140857 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3186078d-155e-46dd-b2ea-7f0802181e82-apiservice-cert\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"
Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.141034 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2t25\" (UniqueName: \"kubernetes.io/projected/3186078d-155e-46dd-b2ea-7f0802181e82-kube-api-access-r2t25\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"
Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.141249 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3186078d-155e-46dd-b2ea-7f0802181e82-webhook-cert\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"
Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.242407 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2t25\" (UniqueName: \"kubernetes.io/projected/3186078d-155e-46dd-b2ea-7f0802181e82-kube-api-access-r2t25\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"
Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.242454 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3186078d-155e-46dd-b2ea-7f0802181e82-webhook-cert\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"
\"kubernetes.io/secret/3186078d-155e-46dd-b2ea-7f0802181e82-webhook-cert\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.242481 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3186078d-155e-46dd-b2ea-7f0802181e82-apiservice-cert\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.247209 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3186078d-155e-46dd-b2ea-7f0802181e82-webhook-cert\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.247985 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3186078d-155e-46dd-b2ea-7f0802181e82-apiservice-cert\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.260457 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv"] Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.261277 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.263198 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2t25\" (UniqueName: \"kubernetes.io/projected/3186078d-155e-46dd-b2ea-7f0802181e82-kube-api-access-r2t25\") pod \"metallb-operator-controller-manager-75747b5f89-glsxz\" (UID: \"3186078d-155e-46dd-b2ea-7f0802181e82\") " pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.264314 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.264710 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-w2vkl" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.265196 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.271932 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.287316 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv"] Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.343883 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eb813616-370d-43a9-9b81-cd9c93f6dc06-webhook-cert\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.343949 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njhh5\" (UniqueName: \"kubernetes.io/projected/eb813616-370d-43a9-9b81-cd9c93f6dc06-kube-api-access-njhh5\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.344027 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eb813616-370d-43a9-9b81-cd9c93f6dc06-apiservice-cert\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.445867 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njhh5\" (UniqueName: \"kubernetes.io/projected/eb813616-370d-43a9-9b81-cd9c93f6dc06-kube-api-access-njhh5\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.445931 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eb813616-370d-43a9-9b81-cd9c93f6dc06-apiservice-cert\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.446023 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eb813616-370d-43a9-9b81-cd9c93f6dc06-webhook-cert\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.454117 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eb813616-370d-43a9-9b81-cd9c93f6dc06-webhook-cert\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.454117 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/eb813616-370d-43a9-9b81-cd9c93f6dc06-apiservice-cert\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.458378 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njhh5\" (UniqueName: \"kubernetes.io/projected/eb813616-370d-43a9-9b81-cd9c93f6dc06-kube-api-access-njhh5\") pod \"metallb-operator-webhook-server-5bf4c9cd7b-w67kv\" (UID: \"eb813616-370d-43a9-9b81-cd9c93f6dc06\") " pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.638672 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.656349 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz"] Dec 05 05:38:48 crc kubenswrapper[4652]: I1205 05:38:48.786045 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv"] Dec 05 05:38:48 crc kubenswrapper[4652]: W1205 05:38:48.790301 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb813616_370d_43a9_9b81_cd9c93f6dc06.slice/crio-0d21139ea82cf8662de965750c9c507fffb6bd924d57fa2bee594ffddf73f074 WatchSource:0}: Error finding container 0d21139ea82cf8662de965750c9c507fffb6bd924d57fa2bee594ffddf73f074: Status 404 returned error can't find the container with id 0d21139ea82cf8662de965750c9c507fffb6bd924d57fa2bee594ffddf73f074 Dec 05 05:38:49 crc kubenswrapper[4652]: I1205 05:38:49.183012 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" event={"ID":"eb813616-370d-43a9-9b81-cd9c93f6dc06","Type":"ContainerStarted","Data":"0d21139ea82cf8662de965750c9c507fffb6bd924d57fa2bee594ffddf73f074"} Dec 05 05:38:49 crc kubenswrapper[4652]: I1205 05:38:49.184138 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" event={"ID":"3186078d-155e-46dd-b2ea-7f0802181e82","Type":"ContainerStarted","Data":"edcfe9e09ea3ec8029cdb52d00088b0b2f83e93e0b0494382706325ab99d0b5c"} Dec 05 05:38:53 crc kubenswrapper[4652]: I1205 05:38:53.212245 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" event={"ID":"eb813616-370d-43a9-9b81-cd9c93f6dc06","Type":"ContainerStarted","Data":"52cf4a19d93160e3ed83a315f9a7f2eadb918d19dca4f698df3a7eef7b9784e3"} Dec 05 05:38:53 crc kubenswrapper[4652]: I1205 05:38:53.212873 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:38:53 crc kubenswrapper[4652]: I1205 05:38:53.213743 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" event={"ID":"3186078d-155e-46dd-b2ea-7f0802181e82","Type":"ContainerStarted","Data":"2f672611f389966298932b760b41c0798ed5e454fd7dfee5bb4accfb58724cb2"} Dec 05 05:38:53 crc kubenswrapper[4652]: I1205 05:38:53.213892 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" Dec 05 05:38:53 crc kubenswrapper[4652]: I1205 05:38:53.228897 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" podStartSLOduration=1.715857875 podStartE2EDuration="5.22888347s" podCreationTimestamp="2025-12-05 05:38:48 +0000 UTC" firstStartedPulling="2025-12-05 05:38:48.793399738 +0000 UTC m=+731.030130006" lastFinishedPulling="2025-12-05 05:38:52.306425333 +0000 UTC m=+734.543155601" observedRunningTime="2025-12-05 05:38:53.225417603 +0000 UTC m=+735.462147859" watchObservedRunningTime="2025-12-05 05:38:53.22888347 +0000 UTC m=+735.465613738" Dec 05 05:38:53 crc kubenswrapper[4652]: I1205 05:38:53.240373 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" podStartSLOduration=2.612095005 podStartE2EDuration="6.240362913s" podCreationTimestamp="2025-12-05 05:38:47 +0000 UTC" firstStartedPulling="2025-12-05 05:38:48.665934766 +0000 UTC m=+730.902665034" lastFinishedPulling="2025-12-05 05:38:52.294202675 +0000 UTC m=+734.530932942" observedRunningTime="2025-12-05 05:38:53.237757352 +0000 UTC m=+735.474487629" watchObservedRunningTime="2025-12-05 05:38:53.240362913 +0000 UTC m=+735.477093179" Dec 05 05:39:04 crc kubenswrapper[4652]: I1205 05:39:04.150302 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:39:04 crc kubenswrapper[4652]: I1205 05:39:04.150740 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:39:04 crc kubenswrapper[4652]: I1205 05:39:04.150790 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:39:04 crc kubenswrapper[4652]: I1205 05:39:04.151334 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d6fbad51d9fa83b6ff2e59f87dc8d974ebb1fcdb8dee5f5d5ecbd852c4da84af"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 05:39:04 crc kubenswrapper[4652]: I1205 05:39:04.151387 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://d6fbad51d9fa83b6ff2e59f87dc8d974ebb1fcdb8dee5f5d5ecbd852c4da84af" gracePeriod=600 Dec 05 05:39:05 crc kubenswrapper[4652]: I1205 05:39:05.270837 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="d6fbad51d9fa83b6ff2e59f87dc8d974ebb1fcdb8dee5f5d5ecbd852c4da84af" exitCode=0 Dec 05 05:39:05 crc kubenswrapper[4652]: I1205 05:39:05.270878 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"d6fbad51d9fa83b6ff2e59f87dc8d974ebb1fcdb8dee5f5d5ecbd852c4da84af"} Dec 05 05:39:05 crc kubenswrapper[4652]: I1205 05:39:05.271214 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"30c3258c2639df7b2105ca40ba8c89dc3f283f9909c72ed9341ff06120095f7d"} Dec 05 05:39:05 crc kubenswrapper[4652]: I1205 05:39:05.271240 4652 scope.go:117] "RemoveContainer" containerID="7e133200e3f39bb8a8ffb76a76a2e5bfeff1444170459551c8b4e9c460f0294f" Dec 05 05:39:08 crc kubenswrapper[4652]: I1205 05:39:08.643349 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5bf4c9cd7b-w67kv" Dec 05 05:39:15 crc kubenswrapper[4652]: I1205 05:39:15.557004 4652 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.274331 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-75747b5f89-glsxz" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.793517 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv"] Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.796511 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.799399 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-xrzvq" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.799617 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.800834 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-zs7fz"] Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.813916 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jvpr\" (UniqueName: \"kubernetes.io/projected/ca3b9554-659a-435e-9349-4284c9130a23-kube-api-access-2jvpr\") pod \"frr-k8s-webhook-server-7fcb986d4-fmrxv\" (UID: \"ca3b9554-659a-435e-9349-4284c9130a23\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.813996 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca3b9554-659a-435e-9349-4284c9130a23-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-fmrxv\" (UID: \"ca3b9554-659a-435e-9349-4284c9130a23\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.814700 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.816510 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.816766 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.820013 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv"] Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.869225 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-w6w66"] Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.870248 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-w6w66" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.871753 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-fmrq7" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.872125 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.872367 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.872836 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.885522 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-zjbfw"] Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.886469 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.890736 4652 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.897910 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-zjbfw"] Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914357 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/88cc55da-6280-437e-b2ab-932afe3de7aa-metallb-excludel2\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914396 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv9pc\" (UniqueName: \"kubernetes.io/projected/410bec27-9b9a-41dd-b45e-a2c9edcba338-kube-api-access-tv9pc\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914422 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-frr-conf\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914445 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/410bec27-9b9a-41dd-b45e-a2c9edcba338-cert\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914459 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/210880ed-8b13-4c81-ae22-68fd50621fda-metrics-certs\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914472 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-metrics-certs\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914497 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-metrics\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914512 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/210880ed-8b13-4c81-ae22-68fd50621fda-frr-startup\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914526 4652 
Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914540 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-reloader\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz"
Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914580 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jvpr\" (UniqueName: \"kubernetes.io/projected/ca3b9554-659a-435e-9349-4284c9130a23-kube-api-access-2jvpr\") pod \"frr-k8s-webhook-server-7fcb986d4-fmrxv\" (UID: \"ca3b9554-659a-435e-9349-4284c9130a23\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv"
Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914604 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66"
Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914619 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/410bec27-9b9a-41dd-b45e-a2c9edcba338-metrics-certs\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw"
Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914655 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7smm\" (UniqueName: \"kubernetes.io/projected/210880ed-8b13-4c81-ae22-68fd50621fda-kube-api-access-b7smm\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz"
Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914675 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-frr-sockets\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz"
Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.914692 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca3b9554-659a-435e-9349-4284c9130a23-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-fmrxv\" (UID: \"ca3b9554-659a-435e-9349-4284c9130a23\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv"
Dec 05 05:39:28 crc kubenswrapper[4652]: E1205 05:39:28.914777 4652 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found
Dec 05 05:39:28 crc kubenswrapper[4652]: E1205 05:39:28.914812 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ca3b9554-659a-435e-9349-4284c9130a23-cert podName:ca3b9554-659a-435e-9349-4284c9130a23 nodeName:}" failed. No retries permitted until 2025-12-05 05:39:29.414799307 +0000 UTC m=+771.651529573 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ca3b9554-659a-435e-9349-4284c9130a23-cert") pod "frr-k8s-webhook-server-7fcb986d4-fmrxv" (UID: "ca3b9554-659a-435e-9349-4284c9130a23") : secret "frr-k8s-webhook-server-cert" not found
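
The mount failure above is not fatal: the secret simply does not exist yet (it shows up moments later), so the operation is parked and retried after durationBeforeRetry, 500ms here. Repeated failures back off further; the doubling and cap in this Go sketch are assumptions about that policy, not kubelet's exact constants:

    // mountretry.go - retry a failing mount with a growing delay.
    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    var errSecretNotFound = errors.New(`secret "frr-k8s-webhook-server-cert" not found`)

    func mountWithRetry(mount func() error, maxDelay time.Duration) error {
        delay := 500 * time.Millisecond
        for {
            err := mount()
            if err == nil {
                return nil
            }
            fmt.Printf("mount failed: %v; no retries permitted for %v\n", err, delay)
            time.Sleep(delay)
            if delay *= 2; delay > maxDelay {
                delay = maxDelay
            }
        }
    }

    func main() {
        attempts := 0
        _ = mountWithRetry(func() error {
            attempts++
            if attempts < 3 {
                return errSecretNotFound // the secret is created later, as in the log
            }
            return nil
        }, 2*time.Minute)
        fmt.Println("mounted after", attempts, "attempts")
    }
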
Dec 05 05:39:28 crc kubenswrapper[4652]: I1205 05:39:28.931381 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jvpr\" (UniqueName: \"kubernetes.io/projected/ca3b9554-659a-435e-9349-4284c9130a23-kube-api-access-2jvpr\") pod \"frr-k8s-webhook-server-7fcb986d4-fmrxv\" (UID: \"ca3b9554-659a-435e-9349-4284c9130a23\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv"
Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015755 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/410bec27-9b9a-41dd-b45e-a2c9edcba338-cert\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw"
Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015798 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/210880ed-8b13-4c81-ae22-68fd50621fda-metrics-certs\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz"
Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015818 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-metrics-certs\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66"
Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015854 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-metrics\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz"
Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015872 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/210880ed-8b13-4c81-ae22-68fd50621fda-frr-startup\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz"
Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015887 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjtt5\" (UniqueName: \"kubernetes.io/projected/88cc55da-6280-437e-b2ab-932afe3de7aa-kube-api-access-vjtt5\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66"
Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015905 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-reloader\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz"
Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015931 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66"
\"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015944 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/410bec27-9b9a-41dd-b45e-a2c9edcba338-metrics-certs\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015966 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7smm\" (UniqueName: \"kubernetes.io/projected/210880ed-8b13-4c81-ae22-68fd50621fda-kube-api-access-b7smm\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.015984 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-frr-sockets\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.016012 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/88cc55da-6280-437e-b2ab-932afe3de7aa-metallb-excludel2\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.016033 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv9pc\" (UniqueName: \"kubernetes.io/projected/410bec27-9b9a-41dd-b45e-a2c9edcba338-kube-api-access-tv9pc\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.016052 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-frr-conf\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: E1205 05:39:29.016339 4652 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.016366 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-frr-conf\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: E1205 05:39:29.016405 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist podName:88cc55da-6280-437e-b2ab-932afe3de7aa nodeName:}" failed. No retries permitted until 2025-12-05 05:39:29.516387799 +0000 UTC m=+771.753118066 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist") pod "speaker-w6w66" (UID: "88cc55da-6280-437e-b2ab-932afe3de7aa") : secret "metallb-memberlist" not found Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.016586 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-frr-sockets\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.016745 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-metrics\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.016854 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/210880ed-8b13-4c81-ae22-68fd50621fda-reloader\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.017306 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/210880ed-8b13-4c81-ae22-68fd50621fda-frr-startup\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.018003 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/88cc55da-6280-437e-b2ab-932afe3de7aa-metallb-excludel2\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.019904 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/410bec27-9b9a-41dd-b45e-a2c9edcba338-cert\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.019938 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/210880ed-8b13-4c81-ae22-68fd50621fda-metrics-certs\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.020026 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/410bec27-9b9a-41dd-b45e-a2c9edcba338-metrics-certs\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.021947 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-metrics-certs\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.030088 4652 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-b7smm\" (UniqueName: \"kubernetes.io/projected/210880ed-8b13-4c81-ae22-68fd50621fda-kube-api-access-b7smm\") pod \"frr-k8s-zs7fz\" (UID: \"210880ed-8b13-4c81-ae22-68fd50621fda\") " pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.030183 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv9pc\" (UniqueName: \"kubernetes.io/projected/410bec27-9b9a-41dd-b45e-a2c9edcba338-kube-api-access-tv9pc\") pod \"controller-f8648f98b-zjbfw\" (UID: \"410bec27-9b9a-41dd-b45e-a2c9edcba338\") " pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.031627 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjtt5\" (UniqueName: \"kubernetes.io/projected/88cc55da-6280-437e-b2ab-932afe3de7aa-kube-api-access-vjtt5\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.133336 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.203418 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.387568 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerStarted","Data":"512b6332235c7a82300551de62f911ecb3ec36a3b7f59e6d1d860e9bf7d4831f"} Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.419812 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca3b9554-659a-435e-9349-4284c9130a23-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-fmrxv\" (UID: \"ca3b9554-659a-435e-9349-4284c9130a23\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.423756 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca3b9554-659a-435e-9349-4284c9130a23-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-fmrxv\" (UID: \"ca3b9554-659a-435e-9349-4284c9130a23\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.425507 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.520756 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:29 crc kubenswrapper[4652]: E1205 05:39:29.521080 4652 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 05:39:29 crc kubenswrapper[4652]: E1205 05:39:29.521126 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist podName:88cc55da-6280-437e-b2ab-932afe3de7aa nodeName:}" failed. 
No retries permitted until 2025-12-05 05:39:30.521113518 +0000 UTC m=+772.757843785 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist") pod "speaker-w6w66" (UID: "88cc55da-6280-437e-b2ab-932afe3de7aa") : secret "metallb-memberlist" not found Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.533667 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-zjbfw"] Dec 05 05:39:29 crc kubenswrapper[4652]: W1205 05:39:29.541208 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod410bec27_9b9a_41dd_b45e_a2c9edcba338.slice/crio-11c17371e18a2f7c31e5ebd94bf2d2c6e706acb7fe4951d48c4f7326c2dfbc21 WatchSource:0}: Error finding container 11c17371e18a2f7c31e5ebd94bf2d2c6e706acb7fe4951d48c4f7326c2dfbc21: Status 404 returned error can't find the container with id 11c17371e18a2f7c31e5ebd94bf2d2c6e706acb7fe4951d48c4f7326c2dfbc21 Dec 05 05:39:29 crc kubenswrapper[4652]: I1205 05:39:29.762275 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv"] Dec 05 05:39:29 crc kubenswrapper[4652]: W1205 05:39:29.766011 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca3b9554_659a_435e_9349_4284c9130a23.slice/crio-2e9accd2df149e5a9a1077340e8790002450ef057ae91cf9a131b4fc38e618e0 WatchSource:0}: Error finding container 2e9accd2df149e5a9a1077340e8790002450ef057ae91cf9a131b4fc38e618e0: Status 404 returned error can't find the container with id 2e9accd2df149e5a9a1077340e8790002450ef057ae91cf9a131b4fc38e618e0 Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.392716 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" event={"ID":"ca3b9554-659a-435e-9349-4284c9130a23","Type":"ContainerStarted","Data":"2e9accd2df149e5a9a1077340e8790002450ef057ae91cf9a131b4fc38e618e0"} Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.394189 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zjbfw" event={"ID":"410bec27-9b9a-41dd-b45e-a2c9edcba338","Type":"ContainerStarted","Data":"517226ce081c1ef6d10b1357d7727ce8613b5ad0b7a5459c0c1d644536e71e81"} Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.394227 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zjbfw" event={"ID":"410bec27-9b9a-41dd-b45e-a2c9edcba338","Type":"ContainerStarted","Data":"a45c67229fab23e119210125c8ea9a2fd12e10fe2d2d5e1c742991bf4a5a89bd"} Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.394238 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zjbfw" event={"ID":"410bec27-9b9a-41dd-b45e-a2c9edcba338","Type":"ContainerStarted","Data":"11c17371e18a2f7c31e5ebd94bf2d2c6e706acb7fe4951d48c4f7326c2dfbc21"} Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.394275 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.406992 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-zjbfw" podStartSLOduration=2.406979931 podStartE2EDuration="2.406979931s" podCreationTimestamp="2025-12-05 05:39:28 +0000 UTC" 
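The two "memberlist" mount failures above show kubelet's per-volume retry backoff: the first failed MountVolume.SetUp schedules a retry after 500ms (durationBeforeRetry 500ms), the second after 1s, and the wait keeps doubling until the operation succeeds or hits a ceiling. The mount does succeed just below, at 05:39:30.539, presumably once the metallb controller had come up and created the "metallb-memberlist" secret, so these errors are startup-ordering noise rather than a persistent fault. A minimal sketch of that cadence, assuming the doubling continues past the two steps the excerpt actually shows and assuming a cap of roughly 2m2s (kubelet's usual exponential-backoff ceiling; not visible in this log):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 500 * time.Millisecond            // first durationBeforeRetry seen in the log
        maxDelay := 2*time.Minute + 2*time.Second  // assumed cap, not shown in this excerpt
        for attempt := 1; attempt <= 6; attempt++ {
            fmt.Printf("attempt %d fails -> next retry in %v\n", attempt, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }

Run as-is this prints 500ms, 1s, 2s, 4s, ... — the first two values match the retries logged for the speaker-w6w66 memberlist volume.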
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:39:30.403594354 +0000 UTC m=+772.640324621" watchObservedRunningTime="2025-12-05 05:39:30.406979931 +0000 UTC m=+772.643710198" Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.534634 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.539099 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/88cc55da-6280-437e-b2ab-932afe3de7aa-memberlist\") pod \"speaker-w6w66\" (UID: \"88cc55da-6280-437e-b2ab-932afe3de7aa\") " pod="metallb-system/speaker-w6w66" Dec 05 05:39:30 crc kubenswrapper[4652]: I1205 05:39:30.681219 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-w6w66" Dec 05 05:39:30 crc kubenswrapper[4652]: W1205 05:39:30.704153 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88cc55da_6280_437e_b2ab_932afe3de7aa.slice/crio-79f1577ba1aef892328cb7a81974f85e3208518e62675482e16464e4bf6c19b0 WatchSource:0}: Error finding container 79f1577ba1aef892328cb7a81974f85e3208518e62675482e16464e4bf6c19b0: Status 404 returned error can't find the container with id 79f1577ba1aef892328cb7a81974f85e3208518e62675482e16464e4bf6c19b0 Dec 05 05:39:31 crc kubenswrapper[4652]: I1205 05:39:31.408665 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-w6w66" event={"ID":"88cc55da-6280-437e-b2ab-932afe3de7aa","Type":"ContainerStarted","Data":"9d0fec8bf08b239b210062584b2d07e257b9b22a9b28519cb920d78e0d6ac857"} Dec 05 05:39:31 crc kubenswrapper[4652]: I1205 05:39:31.408965 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-w6w66" event={"ID":"88cc55da-6280-437e-b2ab-932afe3de7aa","Type":"ContainerStarted","Data":"dfdd348c0441de70decb7ac57f5b91ca20339c5f36613b15d43f9579e4a73f3d"} Dec 05 05:39:31 crc kubenswrapper[4652]: I1205 05:39:31.408978 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-w6w66" event={"ID":"88cc55da-6280-437e-b2ab-932afe3de7aa","Type":"ContainerStarted","Data":"79f1577ba1aef892328cb7a81974f85e3208518e62675482e16464e4bf6c19b0"} Dec 05 05:39:31 crc kubenswrapper[4652]: I1205 05:39:31.409680 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-w6w66" Dec 05 05:39:36 crc kubenswrapper[4652]: I1205 05:39:36.436206 4652 generic.go:334] "Generic (PLEG): container finished" podID="210880ed-8b13-4c81-ae22-68fd50621fda" containerID="6ca0c11f049c25aee6f0db5852a47ecb1d295b503e2f66b9232626286de2cc7f" exitCode=0 Dec 05 05:39:36 crc kubenswrapper[4652]: I1205 05:39:36.436247 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerDied","Data":"6ca0c11f049c25aee6f0db5852a47ecb1d295b503e2f66b9232626286de2cc7f"} Dec 05 05:39:36 crc kubenswrapper[4652]: I1205 05:39:36.438841 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" 
event={"ID":"ca3b9554-659a-435e-9349-4284c9130a23","Type":"ContainerStarted","Data":"a3e5b7a0ff8c049c6ab613b760e926efa822307d48c9e4c560110fa5d110d9f6"} Dec 05 05:39:36 crc kubenswrapper[4652]: I1205 05:39:36.438960 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" Dec 05 05:39:36 crc kubenswrapper[4652]: I1205 05:39:36.455756 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-w6w66" podStartSLOduration=8.45574479 podStartE2EDuration="8.45574479s" podCreationTimestamp="2025-12-05 05:39:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:39:31.431006544 +0000 UTC m=+773.667736811" watchObservedRunningTime="2025-12-05 05:39:36.45574479 +0000 UTC m=+778.692475056" Dec 05 05:39:36 crc kubenswrapper[4652]: I1205 05:39:36.465917 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" podStartSLOduration=2.845622049 podStartE2EDuration="8.465908175s" podCreationTimestamp="2025-12-05 05:39:28 +0000 UTC" firstStartedPulling="2025-12-05 05:39:29.767762134 +0000 UTC m=+772.004492402" lastFinishedPulling="2025-12-05 05:39:35.388048262 +0000 UTC m=+777.624778528" observedRunningTime="2025-12-05 05:39:36.464091499 +0000 UTC m=+778.700821766" watchObservedRunningTime="2025-12-05 05:39:36.465908175 +0000 UTC m=+778.702638442" Dec 05 05:39:37 crc kubenswrapper[4652]: I1205 05:39:37.445786 4652 generic.go:334] "Generic (PLEG): container finished" podID="210880ed-8b13-4c81-ae22-68fd50621fda" containerID="071d5ba37b1462b05e9d027aa55616c3c9d59d0345e83b90d7f49d78eec7b790" exitCode=0 Dec 05 05:39:37 crc kubenswrapper[4652]: I1205 05:39:37.445883 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerDied","Data":"071d5ba37b1462b05e9d027aa55616c3c9d59d0345e83b90d7f49d78eec7b790"} Dec 05 05:39:38 crc kubenswrapper[4652]: I1205 05:39:38.451982 4652 generic.go:334] "Generic (PLEG): container finished" podID="210880ed-8b13-4c81-ae22-68fd50621fda" containerID="5736c65f8308b69879f43d0de477c1864e814786e29a8b8bd947e558088c8bce" exitCode=0 Dec 05 05:39:38 crc kubenswrapper[4652]: I1205 05:39:38.452044 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerDied","Data":"5736c65f8308b69879f43d0de477c1864e814786e29a8b8bd947e558088c8bce"} Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.207051 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-zjbfw" Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.460016 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerStarted","Data":"759841f89e67ad8f0f3f9a2dd51c250749b271651cc411ca3c63ab81f8b39c7b"} Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.460245 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerStarted","Data":"89fd668701c683c9c4b51b539e7b5af82128f7a79f9ffe237a3d825d94b136d7"} Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.460257 4652 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerStarted","Data":"76f90f056b4e17e6ccfce2425454efd81b9fda8497b2a980d780fbff950032a0"} Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.460265 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerStarted","Data":"eee6ec2bf5a24d42234a75c09b6b456fc8718506e69632988bae23db9b986fac"} Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.460277 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.460285 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerStarted","Data":"98577af41ef79c4a1cbb25d0712fe98c55289f04895fe1ff1cf316eb9629008a"} Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.460292 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zs7fz" event={"ID":"210880ed-8b13-4c81-ae22-68fd50621fda","Type":"ContainerStarted","Data":"5566d8e8a55a2876679b0d29bd655f3e7b1c7f708772f3633333c9adcc4be2ee"} Dec 05 05:39:39 crc kubenswrapper[4652]: I1205 05:39:39.475804 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-zs7fz" podStartSLOduration=5.31519274 podStartE2EDuration="11.475789225s" podCreationTimestamp="2025-12-05 05:39:28 +0000 UTC" firstStartedPulling="2025-12-05 05:39:29.241848044 +0000 UTC m=+771.478578311" lastFinishedPulling="2025-12-05 05:39:35.402444529 +0000 UTC m=+777.639174796" observedRunningTime="2025-12-05 05:39:39.474203382 +0000 UTC m=+781.710933649" watchObservedRunningTime="2025-12-05 05:39:39.475789225 +0000 UTC m=+781.712519492" Dec 05 05:39:40 crc kubenswrapper[4652]: I1205 05:39:40.683881 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-w6w66" Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.854772 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-qb8kw"] Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.855680 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qb8kw" Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.857913 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-gkr5k" Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.860025 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.863120 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.863132 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qb8kw"] Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.880745 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc4f2\" (UniqueName: \"kubernetes.io/projected/c12f1423-3752-41ee-8e5a-233aeffc9a1c-kube-api-access-jc4f2\") pod \"openstack-operator-index-qb8kw\" (UID: \"c12f1423-3752-41ee-8e5a-233aeffc9a1c\") " pod="openstack-operators/openstack-operator-index-qb8kw" Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.981883 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc4f2\" (UniqueName: \"kubernetes.io/projected/c12f1423-3752-41ee-8e5a-233aeffc9a1c-kube-api-access-jc4f2\") pod \"openstack-operator-index-qb8kw\" (UID: \"c12f1423-3752-41ee-8e5a-233aeffc9a1c\") " pod="openstack-operators/openstack-operator-index-qb8kw" Dec 05 05:39:42 crc kubenswrapper[4652]: I1205 05:39:42.996496 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc4f2\" (UniqueName: \"kubernetes.io/projected/c12f1423-3752-41ee-8e5a-233aeffc9a1c-kube-api-access-jc4f2\") pod \"openstack-operator-index-qb8kw\" (UID: \"c12f1423-3752-41ee-8e5a-233aeffc9a1c\") " pod="openstack-operators/openstack-operator-index-qb8kw" Dec 05 05:39:43 crc kubenswrapper[4652]: I1205 05:39:43.171813 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qb8kw" Dec 05 05:39:43 crc kubenswrapper[4652]: I1205 05:39:43.965414 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qb8kw"] Dec 05 05:39:43 crc kubenswrapper[4652]: W1205 05:39:43.967846 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc12f1423_3752_41ee_8e5a_233aeffc9a1c.slice/crio-12b86fd10dded7f4e735bf65a8f437a4228855d660f9aeb33cf0c633fcf66a1e WatchSource:0}: Error finding container 12b86fd10dded7f4e735bf65a8f437a4228855d660f9aeb33cf0c633fcf66a1e: Status 404 returned error can't find the container with id 12b86fd10dded7f4e735bf65a8f437a4228855d660f9aeb33cf0c633fcf66a1e Dec 05 05:39:44 crc kubenswrapper[4652]: I1205 05:39:44.134203 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:44 crc kubenswrapper[4652]: I1205 05:39:44.164384 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:44 crc kubenswrapper[4652]: I1205 05:39:44.482182 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qb8kw" event={"ID":"c12f1423-3752-41ee-8e5a-233aeffc9a1c","Type":"ContainerStarted","Data":"12b86fd10dded7f4e735bf65a8f437a4228855d660f9aeb33cf0c633fcf66a1e"} Dec 05 05:39:45 crc kubenswrapper[4652]: I1205 05:39:45.486912 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qb8kw" event={"ID":"c12f1423-3752-41ee-8e5a-233aeffc9a1c","Type":"ContainerStarted","Data":"9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167"} Dec 05 05:39:45 crc kubenswrapper[4652]: I1205 05:39:45.500059 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-qb8kw" podStartSLOduration=2.644597504 podStartE2EDuration="3.500046408s" podCreationTimestamp="2025-12-05 05:39:42 +0000 UTC" firstStartedPulling="2025-12-05 05:39:43.969416836 +0000 UTC m=+786.206147093" lastFinishedPulling="2025-12-05 05:39:44.82486573 +0000 UTC m=+787.061595997" observedRunningTime="2025-12-05 05:39:45.49664511 +0000 UTC m=+787.733375378" watchObservedRunningTime="2025-12-05 05:39:45.500046408 +0000 UTC m=+787.736776674" Dec 05 05:39:46 crc kubenswrapper[4652]: I1205 05:39:46.239547 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qb8kw"] Dec 05 05:39:46 crc kubenswrapper[4652]: I1205 05:39:46.843118 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-5ghq4"] Dec 05 05:39:46 crc kubenswrapper[4652]: I1205 05:39:46.843801 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:39:46 crc kubenswrapper[4652]: I1205 05:39:46.848491 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5ghq4"] Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.029341 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wslbh\" (UniqueName: \"kubernetes.io/projected/fa35b992-e6b3-49d5-93d0-be938e9a9119-kube-api-access-wslbh\") pod \"openstack-operator-index-5ghq4\" (UID: \"fa35b992-e6b3-49d5-93d0-be938e9a9119\") " pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.130717 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wslbh\" (UniqueName: \"kubernetes.io/projected/fa35b992-e6b3-49d5-93d0-be938e9a9119-kube-api-access-wslbh\") pod \"openstack-operator-index-5ghq4\" (UID: \"fa35b992-e6b3-49d5-93d0-be938e9a9119\") " pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.146420 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wslbh\" (UniqueName: \"kubernetes.io/projected/fa35b992-e6b3-49d5-93d0-be938e9a9119-kube-api-access-wslbh\") pod \"openstack-operator-index-5ghq4\" (UID: \"fa35b992-e6b3-49d5-93d0-be938e9a9119\") " pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.157185 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.488962 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5ghq4"] Dec 05 05:39:47 crc kubenswrapper[4652]: W1205 05:39:47.492019 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa35b992_e6b3_49d5_93d0_be938e9a9119.slice/crio-b13aeb3773648a6a15f9b175970abf7a57be71ea6d3938bddeaf56d77f3180cf WatchSource:0}: Error finding container b13aeb3773648a6a15f9b175970abf7a57be71ea6d3938bddeaf56d77f3180cf: Status 404 returned error can't find the container with id b13aeb3773648a6a15f9b175970abf7a57be71ea6d3938bddeaf56d77f3180cf Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.495873 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-qb8kw" podUID="c12f1423-3752-41ee-8e5a-233aeffc9a1c" containerName="registry-server" containerID="cri-o://9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167" gracePeriod=2 Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.755672 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qb8kw" Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.938544 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc4f2\" (UniqueName: \"kubernetes.io/projected/c12f1423-3752-41ee-8e5a-233aeffc9a1c-kube-api-access-jc4f2\") pod \"c12f1423-3752-41ee-8e5a-233aeffc9a1c\" (UID: \"c12f1423-3752-41ee-8e5a-233aeffc9a1c\") " Dec 05 05:39:47 crc kubenswrapper[4652]: I1205 05:39:47.942178 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c12f1423-3752-41ee-8e5a-233aeffc9a1c-kube-api-access-jc4f2" (OuterVolumeSpecName: "kube-api-access-jc4f2") pod "c12f1423-3752-41ee-8e5a-233aeffc9a1c" (UID: "c12f1423-3752-41ee-8e5a-233aeffc9a1c"). InnerVolumeSpecName "kube-api-access-jc4f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.039751 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc4f2\" (UniqueName: \"kubernetes.io/projected/c12f1423-3752-41ee-8e5a-233aeffc9a1c-kube-api-access-jc4f2\") on node \"crc\" DevicePath \"\"" Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.505419 4652 generic.go:334] "Generic (PLEG): container finished" podID="c12f1423-3752-41ee-8e5a-233aeffc9a1c" containerID="9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167" exitCode=0 Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.505476 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-qb8kw" Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.505466 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qb8kw" event={"ID":"c12f1423-3752-41ee-8e5a-233aeffc9a1c","Type":"ContainerDied","Data":"9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167"} Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.506010 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qb8kw" event={"ID":"c12f1423-3752-41ee-8e5a-233aeffc9a1c","Type":"ContainerDied","Data":"12b86fd10dded7f4e735bf65a8f437a4228855d660f9aeb33cf0c633fcf66a1e"} Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.506040 4652 scope.go:117] "RemoveContainer" containerID="9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167" Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.507118 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5ghq4" event={"ID":"fa35b992-e6b3-49d5-93d0-be938e9a9119","Type":"ContainerStarted","Data":"b13aeb3773648a6a15f9b175970abf7a57be71ea6d3938bddeaf56d77f3180cf"} Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.519939 4652 scope.go:117] "RemoveContainer" containerID="9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167" Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.520644 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qb8kw"] Dec 05 05:39:48 crc kubenswrapper[4652]: E1205 05:39:48.520875 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167\": container with ID starting with 9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167 not found: ID does not exist" 
containerID="9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167" Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.520923 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167"} err="failed to get container status \"9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167\": rpc error: code = NotFound desc = could not find container \"9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167\": container with ID starting with 9347ce5c1d8a5cb88e6927f64c5a686370f5ee198b1f1af9148984bd96564167 not found: ID does not exist" Dec 05 05:39:48 crc kubenswrapper[4652]: I1205 05:39:48.524134 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-qb8kw"] Dec 05 05:39:49 crc kubenswrapper[4652]: I1205 05:39:49.136203 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-zs7fz" Dec 05 05:39:49 crc kubenswrapper[4652]: I1205 05:39:49.429612 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-fmrxv" Dec 05 05:39:49 crc kubenswrapper[4652]: I1205 05:39:49.514109 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5ghq4" event={"ID":"fa35b992-e6b3-49d5-93d0-be938e9a9119","Type":"ContainerStarted","Data":"472616b0eca79d91043b0bdc2f53096eabc9f3825e8fcd756f0be30f5bfb443a"} Dec 05 05:39:49 crc kubenswrapper[4652]: I1205 05:39:49.526219 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-5ghq4" podStartSLOduration=3.005410684 podStartE2EDuration="3.526207741s" podCreationTimestamp="2025-12-05 05:39:46 +0000 UTC" firstStartedPulling="2025-12-05 05:39:47.495151705 +0000 UTC m=+789.731881972" lastFinishedPulling="2025-12-05 05:39:48.015948762 +0000 UTC m=+790.252679029" observedRunningTime="2025-12-05 05:39:49.524041215 +0000 UTC m=+791.760771482" watchObservedRunningTime="2025-12-05 05:39:49.526207741 +0000 UTC m=+791.762938007" Dec 05 05:39:50 crc kubenswrapper[4652]: I1205 05:39:50.131373 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c12f1423-3752-41ee-8e5a-233aeffc9a1c" path="/var/lib/kubelet/pods/c12f1423-3752-41ee-8e5a-233aeffc9a1c/volumes" Dec 05 05:39:57 crc kubenswrapper[4652]: I1205 05:39:57.157485 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:39:57 crc kubenswrapper[4652]: I1205 05:39:57.157877 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:39:57 crc kubenswrapper[4652]: I1205 05:39:57.177887 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:39:57 crc kubenswrapper[4652]: I1205 05:39:57.566978 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-5ghq4" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.286144 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl"] Dec 05 05:40:04 crc kubenswrapper[4652]: E1205 05:40:04.286511 4652 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c12f1423-3752-41ee-8e5a-233aeffc9a1c" containerName="registry-server" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.286530 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c12f1423-3752-41ee-8e5a-233aeffc9a1c" containerName="registry-server" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.286676 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c12f1423-3752-41ee-8e5a-233aeffc9a1c" containerName="registry-server" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.287378 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.288660 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-w2spg" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.291246 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-bundle\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.291363 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgfb7\" (UniqueName: \"kubernetes.io/projected/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-kube-api-access-kgfb7\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.291445 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-util\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.291304 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl"] Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.392547 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-bundle\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.392620 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgfb7\" (UniqueName: \"kubernetes.io/projected/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-kube-api-access-kgfb7\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.392643 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-util\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.392896 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-bundle\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.392931 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-util\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.407201 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgfb7\" (UniqueName: \"kubernetes.io/projected/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-kube-api-access-kgfb7\") pod \"917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.600943 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:04 crc kubenswrapper[4652]: I1205 05:40:04.944285 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl"] Dec 05 05:40:05 crc kubenswrapper[4652]: I1205 05:40:05.582823 4652 generic.go:334] "Generic (PLEG): container finished" podID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerID="b15cdaec2631ebea9aeea6d3eab087b94b93c3f75eb4d99b0cf3d55ab132d427" exitCode=0 Dec 05 05:40:05 crc kubenswrapper[4652]: I1205 05:40:05.582864 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" event={"ID":"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915","Type":"ContainerDied","Data":"b15cdaec2631ebea9aeea6d3eab087b94b93c3f75eb4d99b0cf3d55ab132d427"} Dec 05 05:40:05 crc kubenswrapper[4652]: I1205 05:40:05.582994 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" event={"ID":"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915","Type":"ContainerStarted","Data":"0a1410e4e616a94d8fc7a5b24d46aa2b29271f58554b0f2049a05d42fe2b3f62"} Dec 05 05:40:07 crc kubenswrapper[4652]: I1205 05:40:07.594219 4652 generic.go:334] "Generic (PLEG): container finished" podID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerID="06f3f170b57e7c5639e5c7f468966bb1c95be15897652a2f48981d822885ec0f" exitCode=0 Dec 05 05:40:07 crc kubenswrapper[4652]: I1205 05:40:07.594256 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" event={"ID":"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915","Type":"ContainerDied","Data":"06f3f170b57e7c5639e5c7f468966bb1c95be15897652a2f48981d822885ec0f"} Dec 05 05:40:08 crc kubenswrapper[4652]: I1205 05:40:08.601351 4652 generic.go:334] "Generic (PLEG): container finished" podID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerID="e5a4ffd98cdddc3fd74c45062a2a4b376746f3b84d6e002d26b48f5a4c46f7fb" exitCode=0 Dec 05 05:40:08 crc kubenswrapper[4652]: I1205 05:40:08.601414 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" event={"ID":"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915","Type":"ContainerDied","Data":"e5a4ffd98cdddc3fd74c45062a2a4b376746f3b84d6e002d26b48f5a4c46f7fb"} Dec 05 05:40:09 crc kubenswrapper[4652]: I1205 05:40:09.809274 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:09 crc kubenswrapper[4652]: I1205 05:40:09.954323 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-bundle\") pod \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " Dec 05 05:40:09 crc kubenswrapper[4652]: I1205 05:40:09.954393 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-util\") pod \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " Dec 05 05:40:09 crc kubenswrapper[4652]: I1205 05:40:09.954425 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgfb7\" (UniqueName: \"kubernetes.io/projected/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-kube-api-access-kgfb7\") pod \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\" (UID: \"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915\") " Dec 05 05:40:09 crc kubenswrapper[4652]: I1205 05:40:09.955170 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-bundle" (OuterVolumeSpecName: "bundle") pod "50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" (UID: "50ba4cc5-ef20-4d56-83b7-8c5eefd7a915"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:40:09 crc kubenswrapper[4652]: I1205 05:40:09.959039 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-kube-api-access-kgfb7" (OuterVolumeSpecName: "kube-api-access-kgfb7") pod "50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" (UID: "50ba4cc5-ef20-4d56-83b7-8c5eefd7a915"). InnerVolumeSpecName "kube-api-access-kgfb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:40:09 crc kubenswrapper[4652]: I1205 05:40:09.964520 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-util" (OuterVolumeSpecName: "util") pod "50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" (UID: "50ba4cc5-ef20-4d56-83b7-8c5eefd7a915"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:40:10 crc kubenswrapper[4652]: I1205 05:40:10.055551 4652 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:40:10 crc kubenswrapper[4652]: I1205 05:40:10.055623 4652 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-util\") on node \"crc\" DevicePath \"\"" Dec 05 05:40:10 crc kubenswrapper[4652]: I1205 05:40:10.055633 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgfb7\" (UniqueName: \"kubernetes.io/projected/50ba4cc5-ef20-4d56-83b7-8c5eefd7a915-kube-api-access-kgfb7\") on node \"crc\" DevicePath \"\"" Dec 05 05:40:10 crc kubenswrapper[4652]: I1205 05:40:10.611653 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" event={"ID":"50ba4cc5-ef20-4d56-83b7-8c5eefd7a915","Type":"ContainerDied","Data":"0a1410e4e616a94d8fc7a5b24d46aa2b29271f58554b0f2049a05d42fe2b3f62"} Dec 05 05:40:10 crc kubenswrapper[4652]: I1205 05:40:10.611700 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a1410e4e616a94d8fc7a5b24d46aa2b29271f58554b0f2049a05d42fe2b3f62" Dec 05 05:40:10 crc kubenswrapper[4652]: I1205 05:40:10.611710 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.437418 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp"] Dec 05 05:40:16 crc kubenswrapper[4652]: E1205 05:40:16.437887 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerName="pull" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.437902 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerName="pull" Dec 05 05:40:16 crc kubenswrapper[4652]: E1205 05:40:16.437925 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerName="util" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.437930 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerName="util" Dec 05 05:40:16 crc kubenswrapper[4652]: E1205 05:40:16.437937 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerName="extract" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.437943 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerName="extract" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.438051 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="50ba4cc5-ef20-4d56-83b7-8c5eefd7a915" containerName="extract" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.438482 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.443690 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-xqmwp" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.457801 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp"] Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.529474 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7nv7\" (UniqueName: \"kubernetes.io/projected/bd458eaf-351c-46ec-950e-e02dec9040b9-kube-api-access-f7nv7\") pod \"openstack-operator-controller-operator-55b6fb9447-xrwdp\" (UID: \"bd458eaf-351c-46ec-950e-e02dec9040b9\") " pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.630820 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7nv7\" (UniqueName: \"kubernetes.io/projected/bd458eaf-351c-46ec-950e-e02dec9040b9-kube-api-access-f7nv7\") pod \"openstack-operator-controller-operator-55b6fb9447-xrwdp\" (UID: \"bd458eaf-351c-46ec-950e-e02dec9040b9\") " pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.645754 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7nv7\" (UniqueName: \"kubernetes.io/projected/bd458eaf-351c-46ec-950e-e02dec9040b9-kube-api-access-f7nv7\") pod \"openstack-operator-controller-operator-55b6fb9447-xrwdp\" (UID: \"bd458eaf-351c-46ec-950e-e02dec9040b9\") " pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.752345 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" Dec 05 05:40:16 crc kubenswrapper[4652]: I1205 05:40:16.910310 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp"] Dec 05 05:40:17 crc kubenswrapper[4652]: I1205 05:40:17.643478 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" event={"ID":"bd458eaf-351c-46ec-950e-e02dec9040b9","Type":"ContainerStarted","Data":"69359f0693a1b1ae45d58e62adc0fae14ada3e492c5005ff7c38dce9c9a26cb6"} Dec 05 05:40:21 crc kubenswrapper[4652]: I1205 05:40:21.673169 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" event={"ID":"bd458eaf-351c-46ec-950e-e02dec9040b9","Type":"ContainerStarted","Data":"501d76311b5b358a5c9c3f58376d8cf7e580c9f07d0cce919530dbd9dddb5765"} Dec 05 05:40:21 crc kubenswrapper[4652]: I1205 05:40:21.673887 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" Dec 05 05:40:21 crc kubenswrapper[4652]: I1205 05:40:21.699502 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" podStartSLOduration=1.773694729 podStartE2EDuration="5.699476332s" podCreationTimestamp="2025-12-05 05:40:16 +0000 UTC" firstStartedPulling="2025-12-05 05:40:16.918810467 +0000 UTC m=+819.155540735" lastFinishedPulling="2025-12-05 05:40:20.84459207 +0000 UTC m=+823.081322338" observedRunningTime="2025-12-05 05:40:21.694630399 +0000 UTC m=+823.931360665" watchObservedRunningTime="2025-12-05 05:40:21.699476332 +0000 UTC m=+823.936206599" Dec 05 05:40:26 crc kubenswrapper[4652]: I1205 05:40:26.755462 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-55b6fb9447-xrwdp" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.385016 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.386214 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.390512 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.391473 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.393790 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-shts4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.395030 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-l2lpv" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.400922 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.407132 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.408110 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.411708 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-r2b8g" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.412164 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.422160 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.425624 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.427477 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-mvp9c" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.430184 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.434874 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.436871 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.439674 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.441529 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-qsw6j" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.448102 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.457175 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.458117 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.460021 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-sfk4r" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.463155 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.464075 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.467458 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.470105 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk2tt\" (UniqueName: \"kubernetes.io/projected/9e53a822-217b-4037-b378-30ad7d875afd-kube-api-access-tk2tt\") pod \"heat-operator-controller-manager-5f64f6f8bb-m26j4\" (UID: \"9e53a822-217b-4037-b378-30ad7d875afd\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.470153 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwpx9\" (UniqueName: \"kubernetes.io/projected/b13a9032-c937-442f-b305-c3b3d3fad395-kube-api-access-fwpx9\") pod \"barbican-operator-controller-manager-7d9dfd778-txx52\" (UID: \"b13a9032-c937-442f-b305-c3b3d3fad395\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.470174 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5tgn\" (UniqueName: \"kubernetes.io/projected/e929a962-27ca-476f-9800-5bbd1f57a1d6-kube-api-access-t5tgn\") pod \"cinder-operator-controller-manager-859b6ccc6-tnlt9\" (UID: \"e929a962-27ca-476f-9800-5bbd1f57a1d6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.470200 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzkml\" (UniqueName: 
\"kubernetes.io/projected/bdb26248-b4db-48e7-8b0a-ecd525fae23e-kube-api-access-wzkml\") pod \"glance-operator-controller-manager-77987cd8cd-8qfkt\" (UID: \"bdb26248-b4db-48e7-8b0a-ecd525fae23e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.470229 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cvp2\" (UniqueName: \"kubernetes.io/projected/b25da2c0-4af8-4cf9-9c4d-1b15054e9b40-kube-api-access-5cvp2\") pod \"designate-operator-controller-manager-78b4bc895b-lmtl4\" (UID: \"b25da2c0-4af8-4cf9-9c4d-1b15054e9b40\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.472889 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gfhhg" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.493223 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.501871 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.508477 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-95dnf" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.510313 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.523714 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.527812 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.528783 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.531176 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-d7tjq" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.536056 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.537041 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.550479 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-7r5pl" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.561628 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572156 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwpx9\" (UniqueName: \"kubernetes.io/projected/b13a9032-c937-442f-b305-c3b3d3fad395-kube-api-access-fwpx9\") pod \"barbican-operator-controller-manager-7d9dfd778-txx52\" (UID: \"b13a9032-c937-442f-b305-c3b3d3fad395\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572193 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5tgn\" (UniqueName: \"kubernetes.io/projected/e929a962-27ca-476f-9800-5bbd1f57a1d6-kube-api-access-t5tgn\") pod \"cinder-operator-controller-manager-859b6ccc6-tnlt9\" (UID: \"e929a962-27ca-476f-9800-5bbd1f57a1d6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572229 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-225dv\" (UniqueName: \"kubernetes.io/projected/3cc8a91f-1019-4d91-89fa-46eca439c2b3-kube-api-access-225dv\") pod \"keystone-operator-controller-manager-7765d96ddf-b4ff5\" (UID: \"3cc8a91f-1019-4d91-89fa-46eca439c2b3\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572248 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzkml\" (UniqueName: \"kubernetes.io/projected/bdb26248-b4db-48e7-8b0a-ecd525fae23e-kube-api-access-wzkml\") pod \"glance-operator-controller-manager-77987cd8cd-8qfkt\" (UID: \"bdb26248-b4db-48e7-8b0a-ecd525fae23e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572266 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572280 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6v6h\" (UniqueName: \"kubernetes.io/projected/aad4570c-af5e-4ef6-b985-87eeab6d86be-kube-api-access-q6v6h\") pod \"manila-operator-controller-manager-7c79b5df47-dt6ws\" (UID: \"aad4570c-af5e-4ef6-b985-87eeab6d86be\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572310 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cvp2\" (UniqueName: \"kubernetes.io/projected/b25da2c0-4af8-4cf9-9c4d-1b15054e9b40-kube-api-access-5cvp2\") 
pod \"designate-operator-controller-manager-78b4bc895b-lmtl4\" (UID: \"b25da2c0-4af8-4cf9-9c4d-1b15054e9b40\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572329 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mr7b\" (UniqueName: \"kubernetes.io/projected/4bfee3f2-6683-4de8-9b17-765b4180603a-kube-api-access-4mr7b\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572357 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz5ch\" (UniqueName: \"kubernetes.io/projected/92782842-40da-49c5-a384-383efcfd71e1-kube-api-access-mz5ch\") pod \"horizon-operator-controller-manager-68c6d99b8f-57wt5\" (UID: \"92782842-40da-49c5-a384-383efcfd71e1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572386 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk2tt\" (UniqueName: \"kubernetes.io/projected/9e53a822-217b-4037-b378-30ad7d875afd-kube-api-access-tk2tt\") pod \"heat-operator-controller-manager-5f64f6f8bb-m26j4\" (UID: \"9e53a822-217b-4037-b378-30ad7d875afd\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.572408 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcmwk\" (UniqueName: \"kubernetes.io/projected/a74a4099-4a13-4a2c-bf5e-a9a18187ccfa-kube-api-access-fcmwk\") pod \"ironic-operator-controller-manager-6c548fd776-hk2qt\" (UID: \"a74a4099-4a13-4a2c-bf5e-a9a18187ccfa\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.576088 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.599934 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwpx9\" (UniqueName: \"kubernetes.io/projected/b13a9032-c937-442f-b305-c3b3d3fad395-kube-api-access-fwpx9\") pod \"barbican-operator-controller-manager-7d9dfd778-txx52\" (UID: \"b13a9032-c937-442f-b305-c3b3d3fad395\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.600361 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cvp2\" (UniqueName: \"kubernetes.io/projected/b25da2c0-4af8-4cf9-9c4d-1b15054e9b40-kube-api-access-5cvp2\") pod \"designate-operator-controller-manager-78b4bc895b-lmtl4\" (UID: \"b25da2c0-4af8-4cf9-9c4d-1b15054e9b40\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.600394 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5tgn\" (UniqueName: \"kubernetes.io/projected/e929a962-27ca-476f-9800-5bbd1f57a1d6-kube-api-access-t5tgn\") pod \"cinder-operator-controller-manager-859b6ccc6-tnlt9\" (UID: 
\"e929a962-27ca-476f-9800-5bbd1f57a1d6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.603239 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzkml\" (UniqueName: \"kubernetes.io/projected/bdb26248-b4db-48e7-8b0a-ecd525fae23e-kube-api-access-wzkml\") pod \"glance-operator-controller-manager-77987cd8cd-8qfkt\" (UID: \"bdb26248-b4db-48e7-8b0a-ecd525fae23e\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.604878 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk2tt\" (UniqueName: \"kubernetes.io/projected/9e53a822-217b-4037-b378-30ad7d875afd-kube-api-access-tk2tt\") pod \"heat-operator-controller-manager-5f64f6f8bb-m26j4\" (UID: \"9e53a822-217b-4037-b378-30ad7d875afd\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.612407 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.613598 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.615807 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-x7tdx" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.629470 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.637887 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.641189 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.642157 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.644026 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-64fvb" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.648749 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.650631 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.652251 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-8fvwb" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.653060 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.657524 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.665885 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-ds46q"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.666808 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.670714 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-wt7m4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675013 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz5ch\" (UniqueName: \"kubernetes.io/projected/92782842-40da-49c5-a384-383efcfd71e1-kube-api-access-mz5ch\") pod \"horizon-operator-controller-manager-68c6d99b8f-57wt5\" (UID: \"92782842-40da-49c5-a384-383efcfd71e1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675153 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcmwk\" (UniqueName: \"kubernetes.io/projected/a74a4099-4a13-4a2c-bf5e-a9a18187ccfa-kube-api-access-fcmwk\") pod \"ironic-operator-controller-manager-6c548fd776-hk2qt\" (UID: \"a74a4099-4a13-4a2c-bf5e-a9a18187ccfa\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675275 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-225dv\" (UniqueName: \"kubernetes.io/projected/3cc8a91f-1019-4d91-89fa-46eca439c2b3-kube-api-access-225dv\") pod \"keystone-operator-controller-manager-7765d96ddf-b4ff5\" (UID: \"3cc8a91f-1019-4d91-89fa-46eca439c2b3\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675351 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkjfj\" (UniqueName: \"kubernetes.io/projected/15d36563-8ee0-4701-8446-0fddc3b64d7a-kube-api-access-nkjfj\") pod \"nova-operator-controller-manager-697bc559fc-d28kq\" (UID: \"15d36563-8ee0-4701-8446-0fddc3b64d7a\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675426 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675491 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6v6h\" (UniqueName: \"kubernetes.io/projected/aad4570c-af5e-4ef6-b985-87eeab6d86be-kube-api-access-q6v6h\") pod \"manila-operator-controller-manager-7c79b5df47-dt6ws\" (UID: \"aad4570c-af5e-4ef6-b985-87eeab6d86be\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675598 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn57j\" (UniqueName: \"kubernetes.io/projected/e270d457-fc48-4d2c-ab72-328d8832260c-kube-api-access-nn57j\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-bn7wf\" (UID: \"e270d457-fc48-4d2c-ab72-328d8832260c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675677 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mr7b\" (UniqueName: \"kubernetes.io/projected/4bfee3f2-6683-4de8-9b17-765b4180603a-kube-api-access-4mr7b\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.675757 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrpdj\" (UniqueName: \"kubernetes.io/projected/c09fa74b-6157-478a-81ca-ef38b9ac40bf-kube-api-access-xrpdj\") pod \"mariadb-operator-controller-manager-56bbcc9d85-jzvpg\" (UID: \"c09fa74b-6157-478a-81ca-ef38b9ac40bf\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" Dec 05 05:41:02 crc kubenswrapper[4652]: E1205 05:41:02.676385 4652 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:02 crc kubenswrapper[4652]: E1205 05:41:02.676431 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert podName:4bfee3f2-6683-4de8-9b17-765b4180603a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:03.176417508 +0000 UTC m=+865.413147775 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert") pod "infra-operator-controller-manager-57548d458d-2qcwp" (UID: "4bfee3f2-6683-4de8-9b17-765b4180603a") : secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.677541 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.681432 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.682118 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-ds46q"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.685455 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-z2q7n" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.685617 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.691503 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.695279 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-225dv\" (UniqueName: \"kubernetes.io/projected/3cc8a91f-1019-4d91-89fa-46eca439c2b3-kube-api-access-225dv\") pod \"keystone-operator-controller-manager-7765d96ddf-b4ff5\" (UID: \"3cc8a91f-1019-4d91-89fa-46eca439c2b3\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.696060 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mr7b\" (UniqueName: \"kubernetes.io/projected/4bfee3f2-6683-4de8-9b17-765b4180603a-kube-api-access-4mr7b\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.696438 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6v6h\" (UniqueName: \"kubernetes.io/projected/aad4570c-af5e-4ef6-b985-87eeab6d86be-kube-api-access-q6v6h\") pod \"manila-operator-controller-manager-7c79b5df47-dt6ws\" (UID: \"aad4570c-af5e-4ef6-b985-87eeab6d86be\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.697099 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.698040 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.698859 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcmwk\" (UniqueName: \"kubernetes.io/projected/a74a4099-4a13-4a2c-bf5e-a9a18187ccfa-kube-api-access-fcmwk\") pod \"ironic-operator-controller-manager-6c548fd776-hk2qt\" (UID: \"a74a4099-4a13-4a2c-bf5e-a9a18187ccfa\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.700842 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-lfxwk" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.701229 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.710008 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.714924 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz5ch\" (UniqueName: \"kubernetes.io/projected/92782842-40da-49c5-a384-383efcfd71e1-kube-api-access-mz5ch\") pod \"horizon-operator-controller-manager-68c6d99b8f-57wt5\" (UID: \"92782842-40da-49c5-a384-383efcfd71e1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.715612 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-t5drn"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.716541 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.720888 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-24z8w" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.726281 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.730795 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-t5drn"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.737718 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.741891 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.749692 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.750542 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.752029 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-qg84x" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.753171 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.756152 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.776235 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.777738 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvx22\" (UniqueName: \"kubernetes.io/projected/83410e3b-8d96-4dbd-8392-72418cac098b-kube-api-access-lvx22\") pod \"octavia-operator-controller-manager-998648c74-ds46q\" (UID: \"83410e3b-8d96-4dbd-8392-72418cac098b\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.777789 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn57j\" (UniqueName: \"kubernetes.io/projected/e270d457-fc48-4d2c-ab72-328d8832260c-kube-api-access-nn57j\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-bn7wf\" (UID: \"e270d457-fc48-4d2c-ab72-328d8832260c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.777819 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2hx2\" (UniqueName: \"kubernetes.io/projected/1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48-kube-api-access-x2hx2\") pod \"swift-operator-controller-manager-5f8c65bbfc-ggrv8\" (UID: \"1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.777849 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrpdj\" (UniqueName: \"kubernetes.io/projected/c09fa74b-6157-478a-81ca-ef38b9ac40bf-kube-api-access-xrpdj\") pod \"mariadb-operator-controller-manager-56bbcc9d85-jzvpg\" (UID: \"c09fa74b-6157-478a-81ca-ef38b9ac40bf\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.777877 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qstv\" (UniqueName: \"kubernetes.io/projected/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-kube-api-access-8qstv\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.777938 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.777959 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pkn2\" (UniqueName: \"kubernetes.io/projected/3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5-kube-api-access-7pkn2\") pod \"placement-operator-controller-manager-78f8948974-t5drn\" (UID: \"3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.778102 4652 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-nkjfj\" (UniqueName: \"kubernetes.io/projected/15d36563-8ee0-4701-8446-0fddc3b64d7a-kube-api-access-nkjfj\") pod \"nova-operator-controller-manager-697bc559fc-d28kq\" (UID: \"15d36563-8ee0-4701-8446-0fddc3b64d7a\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.778148 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxw2k\" (UniqueName: \"kubernetes.io/projected/5c49de75-9faf-4730-ae6f-29bc4fb36554-kube-api-access-fxw2k\") pod \"ovn-operator-controller-manager-b6456fdb6-pspnk\" (UID: \"5c49de75-9faf-4730-ae6f-29bc4fb36554\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.798354 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkjfj\" (UniqueName: \"kubernetes.io/projected/15d36563-8ee0-4701-8446-0fddc3b64d7a-kube-api-access-nkjfj\") pod \"nova-operator-controller-manager-697bc559fc-d28kq\" (UID: \"15d36563-8ee0-4701-8446-0fddc3b64d7a\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.800241 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrpdj\" (UniqueName: \"kubernetes.io/projected/c09fa74b-6157-478a-81ca-ef38b9ac40bf-kube-api-access-xrpdj\") pod \"mariadb-operator-controller-manager-56bbcc9d85-jzvpg\" (UID: \"c09fa74b-6157-478a-81ca-ef38b9ac40bf\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.809389 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn57j\" (UniqueName: \"kubernetes.io/projected/e270d457-fc48-4d2c-ab72-328d8832260c-kube-api-access-nn57j\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-bn7wf\" (UID: \"e270d457-fc48-4d2c-ab72-328d8832260c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.819183 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.821231 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.828256 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.829059 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.832726 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-6lqfh" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.865511 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.866128 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.880193 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pkn2\" (UniqueName: \"kubernetes.io/projected/3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5-kube-api-access-7pkn2\") pod \"placement-operator-controller-manager-78f8948974-t5drn\" (UID: \"3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.880288 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxw2k\" (UniqueName: \"kubernetes.io/projected/5c49de75-9faf-4730-ae6f-29bc4fb36554-kube-api-access-fxw2k\") pod \"ovn-operator-controller-manager-b6456fdb6-pspnk\" (UID: \"5c49de75-9faf-4730-ae6f-29bc4fb36554\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.880313 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvx22\" (UniqueName: \"kubernetes.io/projected/83410e3b-8d96-4dbd-8392-72418cac098b-kube-api-access-lvx22\") pod \"octavia-operator-controller-manager-998648c74-ds46q\" (UID: \"83410e3b-8d96-4dbd-8392-72418cac098b\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.880334 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2hx2\" (UniqueName: \"kubernetes.io/projected/1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48-kube-api-access-x2hx2\") pod \"swift-operator-controller-manager-5f8c65bbfc-ggrv8\" (UID: \"1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.880364 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qstv\" (UniqueName: \"kubernetes.io/projected/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-kube-api-access-8qstv\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.880398 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85z9k\" (UniqueName: \"kubernetes.io/projected/335d8f40-d377-44e9-93ba-e2ec0a5aa37e-kube-api-access-85z9k\") pod \"telemetry-operator-controller-manager-76cc84c6bb-pcfm9\" (UID: \"335d8f40-d377-44e9-93ba-e2ec0a5aa37e\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.880434 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:02 crc kubenswrapper[4652]: E1205 05:41:02.880540 4652 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:02 crc kubenswrapper[4652]: E1205 05:41:02.880595 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert podName:55dd49af-10d4-4392-99a1-fb7ae4f3f9d0 nodeName:}" failed. No retries permitted until 2025-12-05 05:41:03.380581836 +0000 UTC m=+865.617312102 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f58g8cr" (UID: "55dd49af-10d4-4392-99a1-fb7ae4f3f9d0") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.887274 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.888217 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.890740 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-nkln8" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.903428 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxw2k\" (UniqueName: \"kubernetes.io/projected/5c49de75-9faf-4730-ae6f-29bc4fb36554-kube-api-access-fxw2k\") pod \"ovn-operator-controller-manager-b6456fdb6-pspnk\" (UID: \"5c49de75-9faf-4730-ae6f-29bc4fb36554\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.906381 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pkn2\" (UniqueName: \"kubernetes.io/projected/3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5-kube-api-access-7pkn2\") pod \"placement-operator-controller-manager-78f8948974-t5drn\" (UID: \"3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.908416 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvx22\" (UniqueName: \"kubernetes.io/projected/83410e3b-8d96-4dbd-8392-72418cac098b-kube-api-access-lvx22\") pod \"octavia-operator-controller-manager-998648c74-ds46q\" (UID: \"83410e3b-8d96-4dbd-8392-72418cac098b\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.912145 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2hx2\" (UniqueName: \"kubernetes.io/projected/1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48-kube-api-access-x2hx2\") pod \"swift-operator-controller-manager-5f8c65bbfc-ggrv8\" (UID: \"1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.914217 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-8qstv\" (UniqueName: \"kubernetes.io/projected/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-kube-api-access-8qstv\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.921624 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.939955 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.957371 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.967496 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.981350 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t4xm\" (UniqueName: \"kubernetes.io/projected/220bfae2-7df4-4019-bf69-df29df3cecd9-kube-api-access-9t4xm\") pod \"test-operator-controller-manager-5854674fcc-x6pt9\" (UID: \"220bfae2-7df4-4019-bf69-df29df3cecd9\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.981435 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85z9k\" (UniqueName: \"kubernetes.io/projected/335d8f40-d377-44e9-93ba-e2ec0a5aa37e-kube-api-access-85z9k\") pod \"telemetry-operator-controller-manager-76cc84c6bb-pcfm9\" (UID: \"335d8f40-d377-44e9-93ba-e2ec0a5aa37e\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.987690 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.990328 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr"] Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.996430 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" Dec 05 05:41:02 crc kubenswrapper[4652]: I1205 05:41:02.998388 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-h2sl8" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:02.999538 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.013025 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85z9k\" (UniqueName: \"kubernetes.io/projected/335d8f40-d377-44e9-93ba-e2ec0a5aa37e-kube-api-access-85z9k\") pod \"telemetry-operator-controller-manager-76cc84c6bb-pcfm9\" (UID: \"335d8f40-d377-44e9-93ba-e2ec0a5aa37e\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.082481 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vp7b\" (UniqueName: \"kubernetes.io/projected/3c5d5a42-8c45-453b-87c5-46a78fcad90c-kube-api-access-8vp7b\") pod \"watcher-operator-controller-manager-769dc69bc-kggvr\" (UID: \"3c5d5a42-8c45-453b-87c5-46a78fcad90c\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.082513 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t4xm\" (UniqueName: \"kubernetes.io/projected/220bfae2-7df4-4019-bf69-df29df3cecd9-kube-api-access-9t4xm\") pod \"test-operator-controller-manager-5854674fcc-x6pt9\" (UID: \"220bfae2-7df4-4019-bf69-df29df3cecd9\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.104527 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.105323 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.112017 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.112864 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-rl9d8" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.113064 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t4xm\" (UniqueName: \"kubernetes.io/projected/220bfae2-7df4-4019-bf69-df29df3cecd9-kube-api-access-9t4xm\") pod \"test-operator-controller-manager-5854674fcc-x6pt9\" (UID: \"220bfae2-7df4-4019-bf69-df29df3cecd9\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.113314 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.113343 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.120712 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.131375 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.139224 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.148182 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.163922 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.186443 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.186481 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.186506 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8nm7\" (UniqueName: \"kubernetes.io/projected/2953519b-8365-4aa7-904e-a1b1c8ee525a-kube-api-access-b8nm7\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.186586 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.186610 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vp7b\" (UniqueName: \"kubernetes.io/projected/3c5d5a42-8c45-453b-87c5-46a78fcad90c-kube-api-access-8vp7b\") pod \"watcher-operator-controller-manager-769dc69bc-kggvr\" (UID: \"3c5d5a42-8c45-453b-87c5-46a78fcad90c\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.187132 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.187910 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.189130 4652 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.189187 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert podName:4bfee3f2-6683-4de8-9b17-765b4180603a nodeName:}" failed. 
No retries permitted until 2025-12-05 05:41:04.189169354 +0000 UTC m=+866.425899621 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert") pod "infra-operator-controller-manager-57548d458d-2qcwp" (UID: "4bfee3f2-6683-4de8-9b17-765b4180603a") : secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.191086 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.199542 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.200089 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-zg6dm" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.207129 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vp7b\" (UniqueName: \"kubernetes.io/projected/3c5d5a42-8c45-453b-87c5-46a78fcad90c-kube-api-access-8vp7b\") pod \"watcher-operator-controller-manager-769dc69bc-kggvr\" (UID: \"3c5d5a42-8c45-453b-87c5-46a78fcad90c\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.219628 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.236685 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.289523 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn9dq\" (UniqueName: \"kubernetes.io/projected/22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e-kube-api-access-fn9dq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6lkb4\" (UID: \"22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.289612 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.289648 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.289677 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8nm7\" (UniqueName: \"kubernetes.io/projected/2953519b-8365-4aa7-904e-a1b1c8ee525a-kube-api-access-b8nm7\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: 
\"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.290144 4652 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.290221 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:03.79018698 +0000 UTC m=+866.026917247 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "metrics-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.290954 4652 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.291017 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:03.790999839 +0000 UTC m=+866.027730106 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "webhook-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.312347 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8nm7\" (UniqueName: \"kubernetes.io/projected/2953519b-8365-4aa7-904e-a1b1c8ee525a-kube-api-access-b8nm7\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.341170 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.380655 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.384084 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.387404 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt"] Dec 05 05:41:03 crc kubenswrapper[4652]: W1205 05:41:03.388113 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92782842_40da_49c5_a384_383efcfd71e1.slice/crio-7f6403163999c0e03266f48b1d5d8bcb88287a1b07ad637c17c0df6f8181b2ca WatchSource:0}: Error finding container 7f6403163999c0e03266f48b1d5d8bcb88287a1b07ad637c17c0df6f8181b2ca: Status 404 returned error can't find the container with id 7f6403163999c0e03266f48b1d5d8bcb88287a1b07ad637c17c0df6f8181b2ca Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.390819 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.390888 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn9dq\" (UniqueName: \"kubernetes.io/projected/22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e-kube-api-access-fn9dq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6lkb4\" (UID: \"22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.390954 4652 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.391002 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert podName:55dd49af-10d4-4392-99a1-fb7ae4f3f9d0 nodeName:}" failed. No retries permitted until 2025-12-05 05:41:04.390985435 +0000 UTC m=+866.627715702 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f58g8cr" (UID: "55dd49af-10d4-4392-99a1-fb7ae4f3f9d0") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: W1205 05:41:03.391159 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda74a4099_4a13_4a2c_bf5e_a9a18187ccfa.slice/crio-b1100d9deb040ada35a9867da5847fc148200f5e9a34f014db274ed012b31056 WatchSource:0}: Error finding container b1100d9deb040ada35a9867da5847fc148200f5e9a34f014db274ed012b31056: Status 404 returned error can't find the container with id b1100d9deb040ada35a9867da5847fc148200f5e9a34f014db274ed012b31056 Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.391928 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt"] Dec 05 05:41:03 crc kubenswrapper[4652]: W1205 05:41:03.393826 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb25da2c0_4af8_4cf9_9c4d_1b15054e9b40.slice/crio-9b5915aa29a2cd67aa90ae7f0df9f16b978ac725fda8f9326ec835dd4f869659 WatchSource:0}: Error finding container 9b5915aa29a2cd67aa90ae7f0df9f16b978ac725fda8f9326ec835dd4f869659: Status 404 returned error can't find the container with id 9b5915aa29a2cd67aa90ae7f0df9f16b978ac725fda8f9326ec835dd4f869659 Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.400634 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.404841 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.407236 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn9dq\" (UniqueName: \"kubernetes.io/projected/22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e-kube-api-access-fn9dq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6lkb4\" (UID: \"22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.525739 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.557201 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.562515 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.604535 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq"] Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.611619 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf"] Dec 05 05:41:03 crc kubenswrapper[4652]: W1205 05:41:03.615777 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode270d457_fc48_4d2c_ab72_328d8832260c.slice/crio-61ed1a9ac798cbe35e957102b0bda936ec3e60e05d70197bf7c571c34c6354d1 WatchSource:0}: Error finding container 61ed1a9ac798cbe35e957102b0bda936ec3e60e05d70197bf7c571c34c6354d1: Status 404 returned error can't find the container with id 61ed1a9ac798cbe35e957102b0bda936ec3e60e05d70197bf7c571c34c6354d1 Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.615784 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-t5drn"] Dec 05 05:41:03 crc kubenswrapper[4652]: W1205 05:41:03.617005 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d7b292f_a7df_4f2f_a770_ae5ae5ee89e5.slice/crio-49dbb8ce87e74c661d8f942652951f8408e5411481e17ba827b164a9c5043cdc WatchSource:0}: Error finding container 49dbb8ce87e74c661d8f942652951f8408e5411481e17ba827b164a9c5043cdc: Status 404 returned error can't find the container with id 49dbb8ce87e74c661d8f942652951f8408e5411481e17ba827b164a9c5043cdc Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.634311 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7pkn2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-t5drn_openstack-operators(3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.636520 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7pkn2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-t5drn_openstack-operators(3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.638491 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" podUID="3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.642017 4652 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-ds46q"] Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.655089 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lvx22,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-ds46q_openstack-operators(83410e3b-8d96-4dbd-8392-72418cac098b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.657021 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lvx22,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-ds46q_openstack-operators(83410e3b-8d96-4dbd-8392-72418cac098b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.658294 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" podUID="83410e3b-8d96-4dbd-8392-72418cac098b"
Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.739649 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8"]
Dec 05 05:41:03 crc kubenswrapper[4652]: W1205 05:41:03.744643 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1492e6c1_9ca5_43af_8c9b_dbcf4f3b6c48.slice/crio-b1b320c49c91509efe16590c7cbd8f235e409bff2c8cdb640ed5f644a70b48ae WatchSource:0}: Error finding container b1b320c49c91509efe16590c7cbd8f235e409bff2c8cdb640ed5f644a70b48ae: Status 404 returned error can't find the container with id b1b320c49c91509efe16590c7cbd8f235e409bff2c8cdb640ed5f644a70b48ae
Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.746301 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk"]
Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.751829 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9"]
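
Note: the "pull QPS exceeded" errors above are kubelet's client-side image-pull rate limit, not a registry-side failure. KubeletConfiguration exposes it as registryPullQPS (default 5; 0 disables the limit) and registryBurst (default 10); when this many operator pods are scheduled at once, pulls beyond the burst budget fail immediately with ErrImagePull and are retried later. A minimal Go sketch of the token-bucket behaviour, using golang.org/x/time/rate rather than kubelet's internals, with the default values assumed:

    package main

    import (
    	"fmt"

    	"golang.org/x/time/rate"
    )

    func main() {
    	// Assumed kubelet defaults: registryPullQPS=5, registryBurst=10.
    	limiter := rate.NewLimiter(rate.Limit(5), 10)

    	// Fifteen back-to-back pulls: roughly the first ten spend the burst
    	// budget, the rest are rejected just like the pulls in this log.
    	for i := 1; i <= 15; i++ {
    		if limiter.Allow() {
    			fmt.Printf("pull %2d: admitted\n", i)
    		} else {
    			fmt.Printf("pull %2d: pull QPS exceeded\n", i)
    		}
    	}
    }

Raising registryPullQPS (or setting it to 0) in the kubelet config would avoid this burst of failures when many pods start simultaneously.
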
Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.761867 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-85z9k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-pcfm9_openstack-operators(335d8f40-d377-44e9-93ba-e2ec0a5aa37e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.762063 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fxw2k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-pspnk_openstack-operators(5c49de75-9faf-4730-ae6f-29bc4fb36554): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.762335 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x2hx2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-ggrv8_openstack-operators(1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc 
kubenswrapper[4652]: I1205 05:41:03.766117 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9"] Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.766759 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x2hx2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-ggrv8_openstack-operators(1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.766856 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-85z9k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
telemetry-operator-controller-manager-76cc84c6bb-pcfm9_openstack-operators(335d8f40-d377-44e9-93ba-e2ec0a5aa37e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.766943 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fxw2k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-pspnk_openstack-operators(5c49de75-9faf-4730-ae6f-29bc4fb36554): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.768129 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" podUID="5c49de75-9faf-4730-ae6f-29bc4fb36554" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.768195 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" podUID="1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.768222 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" podUID="335d8f40-d377-44e9-93ba-e2ec0a5aa37e" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.775606 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr"] Dec 05 05:41:03 crc kubenswrapper[4652]: W1205 05:41:03.780320 4652 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c5d5a42_8c45_453b_87c5_46a78fcad90c.slice/crio-e7def2ea97446ca3e3648910fd4c6fa558037d146201d4015facf9a4821f87c7 WatchSource:0}: Error finding container e7def2ea97446ca3e3648910fd4c6fa558037d146201d4015facf9a4821f87c7: Status 404 returned error can't find the container with id e7def2ea97446ca3e3648910fd4c6fa558037d146201d4015facf9a4821f87c7 Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.781451 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9t4xm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-x6pt9_openstack-operators(220bfae2-7df4-4019-bf69-df29df3cecd9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.783175 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8vp7b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-kggvr_openstack-operators(3c5d5a42-8c45-453b-87c5-46a78fcad90c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.783184 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9t4xm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-x6pt9_openstack-operators(220bfae2-7df4-4019-bf69-df29df3cecd9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.784779 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" podUID="220bfae2-7df4-4019-bf69-df29df3cecd9" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.785303 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8vp7b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-kggvr_openstack-operators(3c5d5a42-8c45-453b-87c5-46a78fcad90c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.786441 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" 
pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" podUID="3c5d5a42-8c45-453b-87c5-46a78fcad90c" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.795306 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.795360 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.795417 4652 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.795471 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:04.795457236 +0000 UTC m=+867.032187503 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "metrics-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.795630 4652 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.795747 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:04.795731241 +0000 UTC m=+867.032461508 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "webhook-server-cert" not found Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.882043 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" event={"ID":"83410e3b-8d96-4dbd-8392-72418cac098b","Type":"ContainerStarted","Data":"bb500dbbf18fb48d612254b4d12c5c7de36c66d02748fa9e6c24b8f9baf8c35b"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.882894 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" event={"ID":"c09fa74b-6157-478a-81ca-ef38b9ac40bf","Type":"ContainerStarted","Data":"95382357299b31a75d7b282107b64ec9e42f3e511695e998d299b06effe1003d"} Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.883431 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" podUID="83410e3b-8d96-4dbd-8392-72418cac098b" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.883923 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" event={"ID":"b13a9032-c937-442f-b305-c3b3d3fad395","Type":"ContainerStarted","Data":"0b3477e030f6043ee08ee311d9e306e73f8c978f743141bce793211597bdff10"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.886251 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" event={"ID":"bdb26248-b4db-48e7-8b0a-ecd525fae23e","Type":"ContainerStarted","Data":"e0e4306c1c4b5e9a7e3be496c5fc5b38511db40bd5311c36fa0d128dcd8f3713"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.887406 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" event={"ID":"aad4570c-af5e-4ef6-b985-87eeab6d86be","Type":"ContainerStarted","Data":"1f98da00d471162f5b473e1c8ba8151494d346378e310ea20e6dab5ccf313c0c"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.888472 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" event={"ID":"e270d457-fc48-4d2c-ab72-328d8832260c","Type":"ContainerStarted","Data":"61ed1a9ac798cbe35e957102b0bda936ec3e60e05d70197bf7c571c34c6354d1"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.889528 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" event={"ID":"b25da2c0-4af8-4cf9-9c4d-1b15054e9b40","Type":"ContainerStarted","Data":"9b5915aa29a2cd67aa90ae7f0df9f16b978ac725fda8f9326ec835dd4f869659"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.890267 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" event={"ID":"3cc8a91f-1019-4d91-89fa-46eca439c2b3","Type":"ContainerStarted","Data":"38b7904501f5c6d7f117bd6cba8159e5097e0c45f4c75363995a408110791999"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.891217 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" event={"ID":"3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5","Type":"ContainerStarted","Data":"49dbb8ce87e74c661d8f942652951f8408e5411481e17ba827b164a9c5043cdc"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.892325 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" event={"ID":"e929a962-27ca-476f-9800-5bbd1f57a1d6","Type":"ContainerStarted","Data":"83e621adb2310b71cba77d9dcb5ae5e09363f7caf66a5c25a5406d57b5d35eed"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.894081 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" event={"ID":"3c5d5a42-8c45-453b-87c5-46a78fcad90c","Type":"ContainerStarted","Data":"e7def2ea97446ca3e3648910fd4c6fa558037d146201d4015facf9a4821f87c7"} Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.896093 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" podUID="3c5d5a42-8c45-453b-87c5-46a78fcad90c" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.896109 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" event={"ID":"5c49de75-9faf-4730-ae6f-29bc4fb36554","Type":"ContainerStarted","Data":"d04e08aa1ed30341e73e9dc58c5eef8fe550a004b3152ab22b6eefd76f829538"} Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.896151 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" podUID="3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.897118 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" event={"ID":"335d8f40-d377-44e9-93ba-e2ec0a5aa37e","Type":"ContainerStarted","Data":"02898c6f087f196898cf22aa5bdab52be77eb42b97897c096400628108d45bd9"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.898627 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" 
event={"ID":"92782842-40da-49c5-a384-383efcfd71e1","Type":"ContainerStarted","Data":"7f6403163999c0e03266f48b1d5d8bcb88287a1b07ad637c17c0df6f8181b2ca"} Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.899278 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" podUID="335d8f40-d377-44e9-93ba-e2ec0a5aa37e" Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.899501 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" podUID="5c49de75-9faf-4730-ae6f-29bc4fb36554" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.900144 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" event={"ID":"1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48","Type":"ContainerStarted","Data":"b1b320c49c91509efe16590c7cbd8f235e409bff2c8cdb640ed5f644a70b48ae"} Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.901596 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" podUID="1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.902544 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" event={"ID":"9e53a822-217b-4037-b378-30ad7d875afd","Type":"ContainerStarted","Data":"0df2b9591f1de114b08564c8dffd81e045e91172bc737a83d32a47c287173914"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.904153 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" event={"ID":"a74a4099-4a13-4a2c-bf5e-a9a18187ccfa","Type":"ContainerStarted","Data":"b1100d9deb040ada35a9867da5847fc148200f5e9a34f014db274ed012b31056"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.904808 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" event={"ID":"220bfae2-7df4-4019-bf69-df29df3cecd9","Type":"ContainerStarted","Data":"033057d10c1ca1ddecabf4042914973bb34f026b10fdd764c4430352d304e814"} Dec 05 05:41:03 crc kubenswrapper[4652]: E1205 05:41:03.912182 4652 pod_workers.go:1301] 
"Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" podUID="220bfae2-7df4-4019-bf69-df29df3cecd9" Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.915953 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" event={"ID":"15d36563-8ee0-4701-8446-0fddc3b64d7a","Type":"ContainerStarted","Data":"e010c77512ddfee23eb18ba003046e5730350f4f63cdb117f479604c1824a4e1"} Dec 05 05:41:03 crc kubenswrapper[4652]: I1205 05:41:03.951258 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4"] Dec 05 05:41:04 crc kubenswrapper[4652]: I1205 05:41:04.150819 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:41:04 crc kubenswrapper[4652]: I1205 05:41:04.150864 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:41:04 crc kubenswrapper[4652]: I1205 05:41:04.199072 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.199174 4652 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.199208 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert podName:4bfee3f2-6683-4de8-9b17-765b4180603a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:06.199195327 +0000 UTC m=+868.435925594 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert") pod "infra-operator-controller-manager-57548d458d-2qcwp" (UID: "4bfee3f2-6683-4de8-9b17-765b4180603a") : secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:04 crc kubenswrapper[4652]: I1205 05:41:04.400637 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.400855 4652 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.400898 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert podName:55dd49af-10d4-4392-99a1-fb7ae4f3f9d0 nodeName:}" failed. No retries permitted until 2025-12-05 05:41:06.400885251 +0000 UTC m=+868.637615518 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f58g8cr" (UID: "55dd49af-10d4-4392-99a1-fb7ae4f3f9d0") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:04 crc kubenswrapper[4652]: I1205 05:41:04.808921 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:04 crc kubenswrapper[4652]: I1205 05:41:04.809135 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.809094 4652 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.809242 4652 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.809252 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:06.809235566 +0000 UTC m=+869.045965833 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "metrics-server-cert" not found Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.809282 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:06.80926955 +0000 UTC m=+869.045999817 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "webhook-server-cert" not found Dec 05 05:41:04 crc kubenswrapper[4652]: I1205 05:41:04.924959 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4" event={"ID":"22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e","Type":"ContainerStarted","Data":"b701766988ee4d78d23254d36f44c98cd00200eeb7902763b1fcebafacfb1b6c"} Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.929315 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" podUID="220bfae2-7df4-4019-bf69-df29df3cecd9" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.929401 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" podUID="1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.929457 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" podUID="335d8f40-d377-44e9-93ba-e2ec0a5aa37e" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.930002 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" podUID="83410e3b-8d96-4dbd-8392-72418cac098b" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.930530 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" podUID="3c5d5a42-8c45-453b-87c5-46a78fcad90c" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.930730 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" podUID="3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5" Dec 05 05:41:04 crc kubenswrapper[4652]: E1205 05:41:04.931027 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" podUID="5c49de75-9faf-4730-ae6f-29bc4fb36554" Dec 05 05:41:06 crc kubenswrapper[4652]: I1205 05:41:06.228581 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:06 crc kubenswrapper[4652]: E1205 05:41:06.228748 4652 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:06 crc kubenswrapper[4652]: E1205 05:41:06.230090 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert podName:4bfee3f2-6683-4de8-9b17-765b4180603a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:10.230065618 +0000 UTC m=+872.466795886 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert") pod "infra-operator-controller-manager-57548d458d-2qcwp" (UID: "4bfee3f2-6683-4de8-9b17-765b4180603a") : secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:06 crc kubenswrapper[4652]: I1205 05:41:06.430878 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:06 crc kubenswrapper[4652]: E1205 05:41:06.431154 4652 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:06 crc kubenswrapper[4652]: E1205 05:41:06.431198 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert podName:55dd49af-10d4-4392-99a1-fb7ae4f3f9d0 nodeName:}" failed. No retries permitted until 2025-12-05 05:41:10.431183546 +0000 UTC m=+872.667913813 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f58g8cr" (UID: "55dd49af-10d4-4392-99a1-fb7ae4f3f9d0") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:06 crc kubenswrapper[4652]: I1205 05:41:06.834980 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:06 crc kubenswrapper[4652]: E1205 05:41:06.835188 4652 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 05:41:06 crc kubenswrapper[4652]: I1205 05:41:06.835239 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:06 crc kubenswrapper[4652]: E1205 05:41:06.835253 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:10.835237993 +0000 UTC m=+873.071968259 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "metrics-server-cert" not found Dec 05 05:41:06 crc kubenswrapper[4652]: E1205 05:41:06.835397 4652 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 05:41:06 crc kubenswrapper[4652]: E1205 05:41:06.835445 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:10.835432448 +0000 UTC m=+873.072162715 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "webhook-server-cert" not found Dec 05 05:41:10 crc kubenswrapper[4652]: I1205 05:41:10.273410 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:10 crc kubenswrapper[4652]: E1205 05:41:10.273565 4652 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:10 crc kubenswrapper[4652]: E1205 05:41:10.273916 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert podName:4bfee3f2-6683-4de8-9b17-765b4180603a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:18.273899344 +0000 UTC m=+880.510629612 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert") pod "infra-operator-controller-manager-57548d458d-2qcwp" (UID: "4bfee3f2-6683-4de8-9b17-765b4180603a") : secret "infra-operator-webhook-server-cert" not found Dec 05 05:41:10 crc kubenswrapper[4652]: I1205 05:41:10.479071 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:10 crc kubenswrapper[4652]: E1205 05:41:10.479273 4652 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:10 crc kubenswrapper[4652]: E1205 05:41:10.479338 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert podName:55dd49af-10d4-4392-99a1-fb7ae4f3f9d0 nodeName:}" failed. No retries permitted until 2025-12-05 05:41:18.479322319 +0000 UTC m=+880.716052585 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert") pod "openstack-baremetal-operator-controller-manager-55c85496f58g8cr" (UID: "55dd49af-10d4-4392-99a1-fb7ae4f3f9d0") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 05:41:10 crc kubenswrapper[4652]: I1205 05:41:10.884782 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:10 crc kubenswrapper[4652]: I1205 05:41:10.884851 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:10 crc kubenswrapper[4652]: E1205 05:41:10.884923 4652 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 05:41:10 crc kubenswrapper[4652]: E1205 05:41:10.884993 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:18.884975431 +0000 UTC m=+881.121705698 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "metrics-server-cert" not found Dec 05 05:41:10 crc kubenswrapper[4652]: E1205 05:41:10.885012 4652 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 05:41:10 crc kubenswrapper[4652]: E1205 05:41:10.885065 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:18.885051283 +0000 UTC m=+881.121781551 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "webhook-server-cert" not found Dec 05 05:41:13 crc kubenswrapper[4652]: E1205 05:41:13.109299 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xrpdj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-jzvpg_openstack-operators(c09fa74b-6157-478a-81ca-ef38b9ac40bf): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:13 crc kubenswrapper[4652]: E1205 05:41:13.111478 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" podUID="c09fa74b-6157-478a-81ca-ef38b9ac40bf" Dec 05 05:41:13 crc kubenswrapper[4652]: E1205 05:41:13.115808 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nn57j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-bn7wf_openstack-operators(e270d457-fc48-4d2c-ab72-328d8832260c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 05:41:13 crc kubenswrapper[4652]: E1205 05:41:13.117664 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" podUID="e270d457-fc48-4d2c-ab72-328d8832260c" Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.981808 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" event={"ID":"15d36563-8ee0-4701-8446-0fddc3b64d7a","Type":"ContainerStarted","Data":"f135ec8be2ff0262b22d18b532ba9646205b1f02d6df2d93a852a10ad84cec41"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.983737 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" event={"ID":"e929a962-27ca-476f-9800-5bbd1f57a1d6","Type":"ContainerStarted","Data":"102ac14fc46d07760101be9e273967fd35f9dc9e80a3ae9e4186943cd177ade9"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.985603 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4" event={"ID":"22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e","Type":"ContainerStarted","Data":"8fecf6c277b2488391ca068338b67cea9e6e248e7278d7f99fd7c2b6ccc9d406"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.987160 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" event={"ID":"9e53a822-217b-4037-b378-30ad7d875afd","Type":"ContainerStarted","Data":"8a26c955c1d79e4894e56686fbc2f75b4f12f64287ab9a232050d3803d91812f"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.988471 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" event={"ID":"b25da2c0-4af8-4cf9-9c4d-1b15054e9b40","Type":"ContainerStarted","Data":"ca76cb14209ab22e6551f145f796daddc161ce8f8e56a96898c57a010ea8aef4"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.989744 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" event={"ID":"a74a4099-4a13-4a2c-bf5e-a9a18187ccfa","Type":"ContainerStarted","Data":"c1349800b84bee7320b2d2e6cc8316a435dc317a7283e1a0a2347e153e0523bb"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.991078 4652 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" event={"ID":"c09fa74b-6157-478a-81ca-ef38b9ac40bf","Type":"ContainerStarted","Data":"353a5293d8a51c36d370b3b2febcea46c918c0d137231e92e86e0d483e6c6dd5"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.991260 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" Dec 05 05:41:13 crc kubenswrapper[4652]: E1205 05:41:13.993603 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" podUID="c09fa74b-6157-478a-81ca-ef38b9ac40bf" Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.993825 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" event={"ID":"b13a9032-c937-442f-b305-c3b3d3fad395","Type":"ContainerStarted","Data":"c5dae75e1a4b62dccb5c38e791142aded84f7d6a28202d023ab882dd97e03f51"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.995945 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" event={"ID":"bdb26248-b4db-48e7-8b0a-ecd525fae23e","Type":"ContainerStarted","Data":"2829213d2b6d1ed0a8ac4096a342b1e3dcaaa808e784bb0ebd8834613970a492"} Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.998238 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6lkb4" podStartSLOduration=2.157828749 podStartE2EDuration="10.998187875s" podCreationTimestamp="2025-12-05 05:41:03 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.948096663 +0000 UTC m=+866.184826930" lastFinishedPulling="2025-12-05 05:41:12.788455789 +0000 UTC m=+875.025186056" observedRunningTime="2025-12-05 05:41:13.995396025 +0000 UTC m=+876.232126291" watchObservedRunningTime="2025-12-05 05:41:13.998187875 +0000 UTC m=+876.234918141" Dec 05 05:41:13 crc kubenswrapper[4652]: I1205 05:41:13.999027 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" event={"ID":"92782842-40da-49c5-a384-383efcfd71e1","Type":"ContainerStarted","Data":"1ec2d244f5ec058225cc1d7137f27f3860067a911a85171f94053b14b61f2be4"} Dec 05 05:41:14 crc kubenswrapper[4652]: I1205 05:41:14.001478 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" event={"ID":"aad4570c-af5e-4ef6-b985-87eeab6d86be","Type":"ContainerStarted","Data":"046b1baa46ab87999b1478055a534bfa6b1b690f47f9ae0e8bcd8bf4d1aa7af5"} Dec 05 05:41:14 crc kubenswrapper[4652]: I1205 05:41:14.003228 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" event={"ID":"e270d457-fc48-4d2c-ab72-328d8832260c","Type":"ContainerStarted","Data":"ffbabec261462fa5f2f05f85c0b631aef8e6d4ed208e0dab7618cdacac08a852"} Dec 05 05:41:14 crc kubenswrapper[4652]: I1205 05:41:14.003328 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" Dec 05 05:41:14 crc 
kubenswrapper[4652]: I1205 05:41:14.004748 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" event={"ID":"3cc8a91f-1019-4d91-89fa-46eca439c2b3","Type":"ContainerStarted","Data":"963be01eb5b1b17aa6592305bf0b65f47139aadf7d2a517dfbcad5ff42df7274"} Dec 05 05:41:14 crc kubenswrapper[4652]: E1205 05:41:14.005294 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" podUID="e270d457-fc48-4d2c-ab72-328d8832260c" Dec 05 05:41:15 crc kubenswrapper[4652]: E1205 05:41:15.013021 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" podUID="e270d457-fc48-4d2c-ab72-328d8832260c" Dec 05 05:41:15 crc kubenswrapper[4652]: E1205 05:41:15.013438 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" podUID="c09fa74b-6157-478a-81ca-ef38b9ac40bf" Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.036411 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" event={"ID":"92782842-40da-49c5-a384-383efcfd71e1","Type":"ContainerStarted","Data":"f88d4d35314838532195c4ff75b9ae989af10bcbbfa765e180144640be62d12c"} Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.036665 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.038244 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" event={"ID":"aad4570c-af5e-4ef6-b985-87eeab6d86be","Type":"ContainerStarted","Data":"791ef30539c7acfb990f3f8aad24ac2e72e4f4d9e3c6d35a04bbca3432f653a9"} Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.038678 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.042679 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" event={"ID":"9e53a822-217b-4037-b378-30ad7d875afd","Type":"ContainerStarted","Data":"5430d5f3ff7d6be2c155c73feb455464f644fbf15e4bfa78003341c75c02cb7d"} Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.042826 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.043892 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" 
event={"ID":"a74a4099-4a13-4a2c-bf5e-a9a18187ccfa","Type":"ContainerStarted","Data":"cfec855057809495cb813a95a706071dd79bcf022a8fc025bfd341b44d64a610"} Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.044196 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.050350 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" podStartSLOduration=1.7671389290000001 podStartE2EDuration="15.050340387s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.394080616 +0000 UTC m=+865.630810883" lastFinishedPulling="2025-12-05 05:41:16.677282074 +0000 UTC m=+878.914012341" observedRunningTime="2025-12-05 05:41:17.049076721 +0000 UTC m=+879.285806988" watchObservedRunningTime="2025-12-05 05:41:17.050340387 +0000 UTC m=+879.287070654" Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.077180 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" podStartSLOduration=1.9680916370000001 podStartE2EDuration="15.077165446s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.571186126 +0000 UTC m=+865.807916392" lastFinishedPulling="2025-12-05 05:41:16.680259935 +0000 UTC m=+878.916990201" observedRunningTime="2025-12-05 05:41:17.075871111 +0000 UTC m=+879.312601378" watchObservedRunningTime="2025-12-05 05:41:17.077165446 +0000 UTC m=+879.313895713" Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.092126 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" podStartSLOduration=1.791005862 podStartE2EDuration="15.092109088s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.395303285 +0000 UTC m=+865.632033552" lastFinishedPulling="2025-12-05 05:41:16.69640651 +0000 UTC m=+878.933136778" observedRunningTime="2025-12-05 05:41:17.090782434 +0000 UTC m=+879.327512702" watchObservedRunningTime="2025-12-05 05:41:17.092109088 +0000 UTC m=+879.328839356" Dec 05 05:41:17 crc kubenswrapper[4652]: I1205 05:41:17.110979 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" podStartSLOduration=1.85575918 podStartE2EDuration="15.11096482s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.410158532 +0000 UTC m=+865.646888799" lastFinishedPulling="2025-12-05 05:41:16.665364171 +0000 UTC m=+878.902094439" observedRunningTime="2025-12-05 05:41:17.108397211 +0000 UTC m=+879.345127478" watchObservedRunningTime="2025-12-05 05:41:17.11096482 +0000 UTC m=+879.347695087" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.050850 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" event={"ID":"e929a962-27ca-476f-9800-5bbd1f57a1d6","Type":"ContainerStarted","Data":"ec35dbb2be047cd09b28e00a875cc58d02224b9932b305464d9028d5b64ca6b7"} Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.051761 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" Dec 
05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.053736 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.054696 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" event={"ID":"b25da2c0-4af8-4cf9-9c4d-1b15054e9b40","Type":"ContainerStarted","Data":"af2f0d3ab4941cc065a451e9fd44fc711af7e824ad17b4c1a28301a3f7e265c7"} Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.054876 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.056172 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" event={"ID":"3cc8a91f-1019-4d91-89fa-46eca439c2b3","Type":"ContainerStarted","Data":"ae67ca2ec02ba8b053dc52bb629cb455f1916793982f94eaf20f749349d2b714"} Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.056789 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.058099 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.058334 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.058638 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" event={"ID":"15d36563-8ee0-4701-8446-0fddc3b64d7a","Type":"ContainerStarted","Data":"f8e59a6250bf3f976e9521bb33a640f5c86c1645ae2b797e5f5ac4b7e7bad5fb"} Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.059127 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.060328 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.060646 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" event={"ID":"b13a9032-c937-442f-b305-c3b3d3fad395","Type":"ContainerStarted","Data":"58c488881cae4c31ad80592a00500d837e41e8a24605aa1ddd811a2d3c39b095"} Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.060767 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.065275 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.067320 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" 
event={"ID":"bdb26248-b4db-48e7-8b0a-ecd525fae23e","Type":"ContainerStarted","Data":"98dea22f207b21890f8e29c5012a798bf0c12791682a587a6a68640fc66ff497"} Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.067622 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.068293 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-tnlt9" podStartSLOduration=2.550669969 podStartE2EDuration="16.068283981s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.250618176 +0000 UTC m=+865.487348443" lastFinishedPulling="2025-12-05 05:41:16.768232187 +0000 UTC m=+879.004962455" observedRunningTime="2025-12-05 05:41:18.064381312 +0000 UTC m=+880.301111578" watchObservedRunningTime="2025-12-05 05:41:18.068283981 +0000 UTC m=+880.305014248" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.068955 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.069314 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dt6ws" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.070981 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m26j4" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.072358 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hk2qt" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.072729 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-57wt5" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.090607 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-b4ff5" podStartSLOduration=2.756291907 podStartE2EDuration="16.090577005s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.407773677 +0000 UTC m=+865.644503944" lastFinishedPulling="2025-12-05 05:41:16.742058775 +0000 UTC m=+878.978789042" observedRunningTime="2025-12-05 05:41:18.078703376 +0000 UTC m=+880.315433643" watchObservedRunningTime="2025-12-05 05:41:18.090577005 +0000 UTC m=+880.327307273" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.099445 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lmtl4" podStartSLOduration=2.75377283 podStartE2EDuration="16.099430525s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.396247942 +0000 UTC m=+865.632978209" lastFinishedPulling="2025-12-05 05:41:16.741905637 +0000 UTC m=+878.978635904" observedRunningTime="2025-12-05 05:41:18.094810748 +0000 UTC m=+880.331541015" watchObservedRunningTime="2025-12-05 05:41:18.099430525 +0000 UTC m=+880.336160793" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.113468 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-d28kq" podStartSLOduration=2.960580017 podStartE2EDuration="16.113450201s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.61458909 +0000 UTC m=+865.851319357" lastFinishedPulling="2025-12-05 05:41:16.767459273 +0000 UTC m=+879.004189541" observedRunningTime="2025-12-05 05:41:18.109156977 +0000 UTC m=+880.345887245" watchObservedRunningTime="2025-12-05 05:41:18.113450201 +0000 UTC m=+880.350180469" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.141112 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-txx52" podStartSLOduration=2.540666397 podStartE2EDuration="16.141097045s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.190868219 +0000 UTC m=+865.427598486" lastFinishedPulling="2025-12-05 05:41:16.791298867 +0000 UTC m=+879.028029134" observedRunningTime="2025-12-05 05:41:18.140923038 +0000 UTC m=+880.377653305" watchObservedRunningTime="2025-12-05 05:41:18.141097045 +0000 UTC m=+880.377827312" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.212087 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-8qfkt" podStartSLOduration=2.274253173 podStartE2EDuration="16.212029273s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.39860793 +0000 UTC m=+865.635338197" lastFinishedPulling="2025-12-05 05:41:17.336384029 +0000 UTC m=+879.573114297" observedRunningTime="2025-12-05 05:41:18.205533546 +0000 UTC m=+880.442263813" watchObservedRunningTime="2025-12-05 05:41:18.212029273 +0000 UTC m=+880.448759540" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.303265 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.308259 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4bfee3f2-6683-4de8-9b17-765b4180603a-cert\") pod \"infra-operator-controller-manager-57548d458d-2qcwp\" (UID: \"4bfee3f2-6683-4de8-9b17-765b4180603a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.404987 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.508181 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.517294 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/55dd49af-10d4-4392-99a1-fb7ae4f3f9d0-cert\") pod \"openstack-baremetal-operator-controller-manager-55c85496f58g8cr\" (UID: \"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.700377 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.763286 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp"] Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.913194 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.913247 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:18 crc kubenswrapper[4652]: E1205 05:41:18.913358 4652 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 05:41:18 crc kubenswrapper[4652]: E1205 05:41:18.913411 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs podName:2953519b-8365-4aa7-904e-a1b1c8ee525a nodeName:}" failed. No retries permitted until 2025-12-05 05:41:34.913398417 +0000 UTC m=+897.150128684 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs") pod "openstack-operator-controller-manager-54bdf956c4-5qx5b" (UID: "2953519b-8365-4aa7-904e-a1b1c8ee525a") : secret "webhook-server-cert" not found Dec 05 05:41:18 crc kubenswrapper[4652]: I1205 05:41:18.916658 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-metrics-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:20 crc kubenswrapper[4652]: I1205 05:41:20.106315 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" event={"ID":"4bfee3f2-6683-4de8-9b17-765b4180603a","Type":"ContainerStarted","Data":"cc430eae1b73745d14f83ff57b0706ce2832c67fdf586c3a5d8048babaef2156"} Dec 05 05:41:21 crc kubenswrapper[4652]: I1205 05:41:21.705647 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr"] Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.135035 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" event={"ID":"335d8f40-d377-44e9-93ba-e2ec0a5aa37e","Type":"ContainerStarted","Data":"13cabaa99cab39c92623ca3389c22a2320e9c38a37e59b4697e772957e435a8d"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.135266 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" event={"ID":"335d8f40-d377-44e9-93ba-e2ec0a5aa37e","Type":"ContainerStarted","Data":"18bfe109ca4f24c870d2db63e03dbf42e8de7b817e3c39c111df887f98d4f803"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.135276 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" event={"ID":"1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48","Type":"ContainerStarted","Data":"66374c20dddeca89a94920d3c60f61a5d352c09d361d71134bacd23e87e857bc"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.135287 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" event={"ID":"1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48","Type":"ContainerStarted","Data":"9455ca169975d2306a686e1bfad8aab474a0736fba6616331ba53e645f052780"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.136245 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.136322 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.138980 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" event={"ID":"5c49de75-9faf-4730-ae6f-29bc4fb36554","Type":"ContainerStarted","Data":"e61323554e6d19580829300b351b4049aef749ff78cf0780ca07b40bc5dca805"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.139008 4652 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" event={"ID":"5c49de75-9faf-4730-ae6f-29bc4fb36554","Type":"ContainerStarted","Data":"7a4c07db268d5accded174c1dfa8fe767532464b5d4a16ba542f0ce2bef1efb5"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.139570 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.141167 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" event={"ID":"3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5","Type":"ContainerStarted","Data":"2d944e581f0e36db03c016bbefe3d470cea3091d3a2f890bfe4b1d3af3bc8ed9"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.141200 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" event={"ID":"3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5","Type":"ContainerStarted","Data":"46c8c2da659a49eb827d7a2d295212573b550077c6d0af754297a64cd26332ea"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.141546 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.144658 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" event={"ID":"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0","Type":"ContainerStarted","Data":"2a947bcaec2be322fc7b7d3d0efa6ddf637d637916d6f4b4db69b39e68c0938c"} Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.155771 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" podStartSLOduration=2.560150037 podStartE2EDuration="20.155755643s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.761764161 +0000 UTC m=+865.998494429" lastFinishedPulling="2025-12-05 05:41:21.357369767 +0000 UTC m=+883.594100035" observedRunningTime="2025-12-05 05:41:22.153017274 +0000 UTC m=+884.389747540" watchObservedRunningTime="2025-12-05 05:41:22.155755643 +0000 UTC m=+884.392485911" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.169543 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" podStartSLOduration=2.443020561 podStartE2EDuration="20.169534176s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.634157592 +0000 UTC m=+865.870887858" lastFinishedPulling="2025-12-05 05:41:21.360671206 +0000 UTC m=+883.597401473" observedRunningTime="2025-12-05 05:41:22.168402368 +0000 UTC m=+884.405132634" watchObservedRunningTime="2025-12-05 05:41:22.169534176 +0000 UTC m=+884.406264443" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.179359 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" podStartSLOduration=2.5795624459999997 podStartE2EDuration="20.179350406s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.761758972 +0000 UTC m=+865.998489239" lastFinishedPulling="2025-12-05 05:41:21.361546933 +0000 UTC m=+883.598277199" 
observedRunningTime="2025-12-05 05:41:22.177893497 +0000 UTC m=+884.414623764" watchObservedRunningTime="2025-12-05 05:41:22.179350406 +0000 UTC m=+884.416080674" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.193094 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" podStartSLOduration=2.597440678 podStartE2EDuration="20.193079646s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.761745436 +0000 UTC m=+865.998475704" lastFinishedPulling="2025-12-05 05:41:21.357384405 +0000 UTC m=+883.594114672" observedRunningTime="2025-12-05 05:41:22.192655959 +0000 UTC m=+884.429386226" watchObservedRunningTime="2025-12-05 05:41:22.193079646 +0000 UTC m=+884.429809914" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.943527 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" Dec 05 05:41:22 crc kubenswrapper[4652]: I1205 05:41:22.959827 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.168617 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" event={"ID":"83410e3b-8d96-4dbd-8392-72418cac098b","Type":"ContainerStarted","Data":"852b58c2a836d90205503766f8bbc71ff258609b103da7df5946f0325cf20e00"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.168878 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" event={"ID":"83410e3b-8d96-4dbd-8392-72418cac098b","Type":"ContainerStarted","Data":"ad69365fac308e9c675582a4e767b9cd3870385c9188c21c5bfca9e2ff64eb5a"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.169014 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.170955 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" event={"ID":"e270d457-fc48-4d2c-ab72-328d8832260c","Type":"ContainerStarted","Data":"b7082140f82d0c5ae06003e6b753d503a5a79d6bd76c2907123e0020637d9c0d"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.173783 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" event={"ID":"c09fa74b-6157-478a-81ca-ef38b9ac40bf","Type":"ContainerStarted","Data":"b731216c9b9b8fadd6427e44f211f0f87f067c0db45379bddb51750c7ad8b0ce"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.175431 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" event={"ID":"220bfae2-7df4-4019-bf69-df29df3cecd9","Type":"ContainerStarted","Data":"1664793826e5a2f6d8c750498b55d7ce43e85dcf76dc7431b1b45a6fcaff9579"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.175518 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" event={"ID":"220bfae2-7df4-4019-bf69-df29df3cecd9","Type":"ContainerStarted","Data":"0c2573af5dbc05f622b2dfc720f59df785e8d7cf6ecfdfe39054ad45d58521ee"} Dec 
05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.175684 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.177091 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" event={"ID":"3c5d5a42-8c45-453b-87c5-46a78fcad90c","Type":"ContainerStarted","Data":"56c5688898343908401400ea5b7c330c612cb4c73e74b3d880e4cab7601deee3"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.177118 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" event={"ID":"3c5d5a42-8c45-453b-87c5-46a78fcad90c","Type":"ContainerStarted","Data":"7b105a8527b49e29be79c1d130bf67d0b5912fa38e1505a181a9560282638191"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.177221 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.178426 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" event={"ID":"4bfee3f2-6683-4de8-9b17-765b4180603a","Type":"ContainerStarted","Data":"e18325e9d2e77cfb328d08e357eedb65e5f589d15d09cdb0ca6b58d3120c11b7"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.178501 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" event={"ID":"4bfee3f2-6683-4de8-9b17-765b4180603a","Type":"ContainerStarted","Data":"91174e3ce3a191057bf7a71f53fedda4b70759bc1602dc8c0c172de2ae982d1a"} Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.178585 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.183388 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" podStartSLOduration=2.397533328 podStartE2EDuration="23.183378336s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.654961735 +0000 UTC m=+865.891692003" lastFinishedPulling="2025-12-05 05:41:24.440806744 +0000 UTC m=+886.677537011" observedRunningTime="2025-12-05 05:41:25.179211809 +0000 UTC m=+887.415942076" watchObservedRunningTime="2025-12-05 05:41:25.183378336 +0000 UTC m=+887.420108602" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.191585 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" podStartSLOduration=17.806725382 podStartE2EDuration="23.191576363s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:19.076504388 +0000 UTC m=+881.313234655" lastFinishedPulling="2025-12-05 05:41:24.461355369 +0000 UTC m=+886.698085636" observedRunningTime="2025-12-05 05:41:25.190322494 +0000 UTC m=+887.427052761" watchObservedRunningTime="2025-12-05 05:41:25.191576363 +0000 UTC m=+887.428306630" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.204047 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-jzvpg" 
podStartSLOduration=14.009919203 podStartE2EDuration="23.204038299s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.570999123 +0000 UTC m=+865.807729391" lastFinishedPulling="2025-12-05 05:41:12.76511822 +0000 UTC m=+875.001848487" observedRunningTime="2025-12-05 05:41:25.202374089 +0000 UTC m=+887.439104356" watchObservedRunningTime="2025-12-05 05:41:25.204038299 +0000 UTC m=+887.440768566" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.218418 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" podStartSLOduration=2.558057814 podStartE2EDuration="23.218403564s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.78137888 +0000 UTC m=+866.018109147" lastFinishedPulling="2025-12-05 05:41:24.44172463 +0000 UTC m=+886.678454897" observedRunningTime="2025-12-05 05:41:25.214845423 +0000 UTC m=+887.451575690" watchObservedRunningTime="2025-12-05 05:41:25.218403564 +0000 UTC m=+887.455133831" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.246259 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" podStartSLOduration=2.602798175 podStartE2EDuration="23.246244203s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.782997995 +0000 UTC m=+866.019728262" lastFinishedPulling="2025-12-05 05:41:24.426444024 +0000 UTC m=+886.663174290" observedRunningTime="2025-12-05 05:41:25.233415145 +0000 UTC m=+887.470145412" watchObservedRunningTime="2025-12-05 05:41:25.246244203 +0000 UTC m=+887.482974469" Dec 05 05:41:25 crc kubenswrapper[4652]: I1205 05:41:25.247049 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-bn7wf" podStartSLOduration=14.12074253 podStartE2EDuration="23.247043766s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:03.631972933 +0000 UTC m=+865.868703200" lastFinishedPulling="2025-12-05 05:41:12.758274169 +0000 UTC m=+874.995004436" observedRunningTime="2025-12-05 05:41:25.245864859 +0000 UTC m=+887.482595126" watchObservedRunningTime="2025-12-05 05:41:25.247043766 +0000 UTC m=+887.483774033" Dec 05 05:41:32 crc kubenswrapper[4652]: I1205 05:41:32.211543 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" event={"ID":"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0","Type":"ContainerStarted","Data":"f1a0655a03d470a024f45a948b73370b6eea51633511da34e31f1fc51b7e02a7"} Dec 05 05:41:32 crc kubenswrapper[4652]: I1205 05:41:32.212069 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" event={"ID":"55dd49af-10d4-4392-99a1-fb7ae4f3f9d0","Type":"ContainerStarted","Data":"86540be32a1ec3409fb52598f170fec9c7eb7bdc5004594cf557e879ce1805d8"} Dec 05 05:41:32 crc kubenswrapper[4652]: I1205 05:41:32.212090 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:32 crc kubenswrapper[4652]: I1205 05:41:32.231034 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" podStartSLOduration=20.787803981 podStartE2EDuration="30.231025186s" podCreationTimestamp="2025-12-05 05:41:02 +0000 UTC" firstStartedPulling="2025-12-05 05:41:21.725732991 +0000 UTC m=+883.962463257" lastFinishedPulling="2025-12-05 05:41:31.168954195 +0000 UTC m=+893.405684462" observedRunningTime="2025-12-05 05:41:32.228869792 +0000 UTC m=+894.465600059" watchObservedRunningTime="2025-12-05 05:41:32.231025186 +0000 UTC m=+894.467755453" Dec 05 05:41:32 crc kubenswrapper[4652]: I1205 05:41:32.991228 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-ds46q" Dec 05 05:41:33 crc kubenswrapper[4652]: I1205 05:41:33.124076 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-pspnk" Dec 05 05:41:33 crc kubenswrapper[4652]: I1205 05:41:33.132951 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-t5drn" Dec 05 05:41:33 crc kubenswrapper[4652]: I1205 05:41:33.141863 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ggrv8" Dec 05 05:41:33 crc kubenswrapper[4652]: I1205 05:41:33.152840 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-pcfm9" Dec 05 05:41:33 crc kubenswrapper[4652]: I1205 05:41:33.221852 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-x6pt9" Dec 05 05:41:33 crc kubenswrapper[4652]: I1205 05:41:33.352076 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-kggvr" Dec 05 05:41:34 crc kubenswrapper[4652]: I1205 05:41:34.150485 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:41:34 crc kubenswrapper[4652]: I1205 05:41:34.150532 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:41:34 crc kubenswrapper[4652]: I1205 05:41:34.931211 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: \"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:34 crc kubenswrapper[4652]: I1205 05:41:34.935853 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2953519b-8365-4aa7-904e-a1b1c8ee525a-webhook-certs\") pod \"openstack-operator-controller-manager-54bdf956c4-5qx5b\" (UID: 
\"2953519b-8365-4aa7-904e-a1b1c8ee525a\") " pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:35 crc kubenswrapper[4652]: I1205 05:41:35.231866 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:35 crc kubenswrapper[4652]: I1205 05:41:35.597634 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b"] Dec 05 05:41:36 crc kubenswrapper[4652]: I1205 05:41:36.236239 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" event={"ID":"2953519b-8365-4aa7-904e-a1b1c8ee525a","Type":"ContainerStarted","Data":"de463363e1bac9606a2ee215f4050330aa1fa28e6235d8334e0e948672ef41eb"} Dec 05 05:41:36 crc kubenswrapper[4652]: I1205 05:41:36.237323 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:41:36 crc kubenswrapper[4652]: I1205 05:41:36.237346 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" event={"ID":"2953519b-8365-4aa7-904e-a1b1c8ee525a","Type":"ContainerStarted","Data":"bcec4955c2ecc75ebd32b6b05d0f3688958856321581a378b7208e91a3ff7394"} Dec 05 05:41:36 crc kubenswrapper[4652]: I1205 05:41:36.256866 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" podStartSLOduration=33.256853791 podStartE2EDuration="33.256853791s" podCreationTimestamp="2025-12-05 05:41:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:41:36.255394567 +0000 UTC m=+898.492124834" watchObservedRunningTime="2025-12-05 05:41:36.256853791 +0000 UTC m=+898.493584058" Dec 05 05:41:38 crc kubenswrapper[4652]: I1205 05:41:38.410786 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-2qcwp" Dec 05 05:41:38 crc kubenswrapper[4652]: I1205 05:41:38.705879 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-55c85496f58g8cr" Dec 05 05:41:45 crc kubenswrapper[4652]: I1205 05:41:45.236347 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-54bdf956c4-5qx5b" Dec 05 05:42:00 crc kubenswrapper[4652]: I1205 05:42:00.897868 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-9wk2l"] Dec 05 05:42:00 crc kubenswrapper[4652]: I1205 05:42:00.899347 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:00 crc kubenswrapper[4652]: I1205 05:42:00.901215 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 05:42:00 crc kubenswrapper[4652]: I1205 05:42:00.901959 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 05:42:00 crc kubenswrapper[4652]: I1205 05:42:00.902199 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-dtp5f" Dec 05 05:42:00 crc kubenswrapper[4652]: I1205 05:42:00.902277 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 05:42:00 crc kubenswrapper[4652]: I1205 05:42:00.912130 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-9wk2l"] Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.003968 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-t79sk"] Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.004964 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: W1205 05:42:01.006195 4652 reflector.go:561] object-"openstack"/"dns-svc": failed to list *v1.ConfigMap: configmaps "dns-svc" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 05 05:42:01 crc kubenswrapper[4652]: E1205 05:42:01.006229 4652 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"dns-svc\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"dns-svc\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.019188 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-t79sk"] Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.046349 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpfzj\" (UniqueName: \"kubernetes.io/projected/66a52801-4112-449d-a366-cdd147e56ca5-kube-api-access-fpfzj\") pod \"dnsmasq-dns-8468885bfc-9wk2l\" (UID: \"66a52801-4112-449d-a366-cdd147e56ca5\") " pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.046383 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66a52801-4112-449d-a366-cdd147e56ca5-config\") pod \"dnsmasq-dns-8468885bfc-9wk2l\" (UID: \"66a52801-4112-449d-a366-cdd147e56ca5\") " pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.147790 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpfzj\" (UniqueName: \"kubernetes.io/projected/66a52801-4112-449d-a366-cdd147e56ca5-kube-api-access-fpfzj\") pod \"dnsmasq-dns-8468885bfc-9wk2l\" (UID: \"66a52801-4112-449d-a366-cdd147e56ca5\") " pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.147829 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/66a52801-4112-449d-a366-cdd147e56ca5-config\") pod \"dnsmasq-dns-8468885bfc-9wk2l\" (UID: \"66a52801-4112-449d-a366-cdd147e56ca5\") " pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.147888 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.147938 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-config\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.147973 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvxjf\" (UniqueName: \"kubernetes.io/projected/c13255f8-7735-451b-90d4-27cc4d0844b2-kube-api-access-tvxjf\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.148854 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66a52801-4112-449d-a366-cdd147e56ca5-config\") pod \"dnsmasq-dns-8468885bfc-9wk2l\" (UID: \"66a52801-4112-449d-a366-cdd147e56ca5\") " pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.164635 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpfzj\" (UniqueName: \"kubernetes.io/projected/66a52801-4112-449d-a366-cdd147e56ca5-kube-api-access-fpfzj\") pod \"dnsmasq-dns-8468885bfc-9wk2l\" (UID: \"66a52801-4112-449d-a366-cdd147e56ca5\") " pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.216290 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.249036 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-config\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.250083 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvxjf\" (UniqueName: \"kubernetes.io/projected/c13255f8-7735-451b-90d4-27cc4d0844b2-kube-api-access-tvxjf\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.250515 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.250025 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-config\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.263433 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvxjf\" (UniqueName: \"kubernetes.io/projected/c13255f8-7735-451b-90d4-27cc4d0844b2-kube-api-access-tvxjf\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:01 crc kubenswrapper[4652]: I1205 05:42:01.568983 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-9wk2l"] Dec 05 05:42:01 crc kubenswrapper[4652]: W1205 05:42:01.572338 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66a52801_4112_449d_a366_cdd147e56ca5.slice/crio-9782d931eb8f2be94bca8216cae668a20d1e4bd965014c029b3a776b5c0b742a WatchSource:0}: Error finding container 9782d931eb8f2be94bca8216cae668a20d1e4bd965014c029b3a776b5c0b742a: Status 404 returned error can't find the container with id 9782d931eb8f2be94bca8216cae668a20d1e4bd965014c029b3a776b5c0b742a Dec 05 05:42:02 crc kubenswrapper[4652]: E1205 05:42:02.250764 4652 configmap.go:193] Couldn't get configMap openstack/dns-svc: failed to sync configmap cache: timed out waiting for the condition Dec 05 05:42:02 crc kubenswrapper[4652]: E1205 05:42:02.250831 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc podName:c13255f8-7735-451b-90d4-27cc4d0844b2 nodeName:}" failed. No retries permitted until 2025-12-05 05:42:02.750816471 +0000 UTC m=+924.987546737 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc") pod "dnsmasq-dns-545d49fd5c-t79sk" (UID: "c13255f8-7735-451b-90d4-27cc4d0844b2") : failed to sync configmap cache: timed out waiting for the condition Dec 05 05:42:02 crc kubenswrapper[4652]: I1205 05:42:02.375488 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" event={"ID":"66a52801-4112-449d-a366-cdd147e56ca5","Type":"ContainerStarted","Data":"9782d931eb8f2be94bca8216cae668a20d1e4bd965014c029b3a776b5c0b742a"} Dec 05 05:42:02 crc kubenswrapper[4652]: I1205 05:42:02.398605 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 05:42:02 crc kubenswrapper[4652]: I1205 05:42:02.767072 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:02 crc kubenswrapper[4652]: I1205 05:42:02.768047 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc\") pod \"dnsmasq-dns-545d49fd5c-t79sk\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:02 crc kubenswrapper[4652]: I1205 05:42:02.817791 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:03 crc kubenswrapper[4652]: I1205 05:42:03.232891 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-t79sk"] Dec 05 05:42:03 crc kubenswrapper[4652]: I1205 05:42:03.382721 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" event={"ID":"c13255f8-7735-451b-90d4-27cc4d0844b2","Type":"ContainerStarted","Data":"365093c14fc2c737feb3e2541cca605e2c50da57d13b4dfeb12920268e63aa38"} Dec 05 05:42:04 crc kubenswrapper[4652]: I1205 05:42:04.150147 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:42:04 crc kubenswrapper[4652]: I1205 05:42:04.150193 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:42:04 crc kubenswrapper[4652]: I1205 05:42:04.150224 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:42:04 crc kubenswrapper[4652]: I1205 05:42:04.150599 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"30c3258c2639df7b2105ca40ba8c89dc3f283f9909c72ed9341ff06120095f7d"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" 
Dec 05 05:42:04 crc kubenswrapper[4652]: I1205 05:42:04.150640 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://30c3258c2639df7b2105ca40ba8c89dc3f283f9909c72ed9341ff06120095f7d" gracePeriod=600
Dec 05 05:42:04 crc kubenswrapper[4652]: I1205 05:42:04.393738 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="30c3258c2639df7b2105ca40ba8c89dc3f283f9909c72ed9341ff06120095f7d" exitCode=0
Dec 05 05:42:04 crc kubenswrapper[4652]: I1205 05:42:04.393763 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"30c3258c2639df7b2105ca40ba8c89dc3f283f9909c72ed9341ff06120095f7d"}
Dec 05 05:42:04 crc kubenswrapper[4652]: I1205 05:42:04.393974 4652 scope.go:117] "RemoveContainer" containerID="d6fbad51d9fa83b6ff2e59f87dc8d974ebb1fcdb8dee5f5d5ecbd852c4da84af"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.073411 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-t79sk"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.102707 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b9b4959cc-6sfnn"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.103830 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.107452 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b9b4959cc-6sfnn"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.199602 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-config\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.199694 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb9lx\" (UniqueName: \"kubernetes.io/projected/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-kube-api-access-qb9lx\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.199905 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-dns-svc\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.301007 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-config\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.301066 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb9lx\" (UniqueName: \"kubernetes.io/projected/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-kube-api-access-qb9lx\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.301132 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-dns-svc\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.302198 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-dns-svc\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.302310 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-config\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.326181 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-9wk2l"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.346365 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb9lx\" (UniqueName: \"kubernetes.io/projected/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-kube-api-access-qb9lx\") pod \"dnsmasq-dns-b9b4959cc-6sfnn\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.353184 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-lvk6d"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.354987 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.361810 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-lvk6d"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.402204 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhmt9\" (UniqueName: \"kubernetes.io/projected/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-kube-api-access-mhmt9\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.402285 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-config\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.402312 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-dns-svc\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.416699 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"0e8d98548aa27c73c99040f551d5e2c229be9c9f00418747bea9244c8abd5fdd"}
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.424254 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.504159 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-config\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.504264 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-dns-svc\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.504405 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhmt9\" (UniqueName: \"kubernetes.io/projected/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-kube-api-access-mhmt9\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.510712 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-config\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.510892 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-dns-svc\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.528777 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhmt9\" (UniqueName: \"kubernetes.io/projected/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-kube-api-access-mhmt9\") pod \"dnsmasq-dns-86b8f4ff9-lvk6d\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.587065 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-lvk6d"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.587505 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.604782 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5449989c59-vnpx6"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.605761 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.618673 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-vnpx6"]
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.714029 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-config\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.714307 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs2xx\" (UniqueName: \"kubernetes.io/projected/d89b380c-d2c9-431f-8034-1002ce6bf244-kube-api-access-vs2xx\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.714409 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-dns-svc\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.815612 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-dns-svc\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.815663 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-config\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.815682 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs2xx\" (UniqueName: \"kubernetes.io/projected/d89b380c-d2c9-431f-8034-1002ce6bf244-kube-api-access-vs2xx\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.816796 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-dns-svc\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.816858 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-config\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.832586 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs2xx\" (UniqueName: \"kubernetes.io/projected/d89b380c-d2c9-431f-8034-1002ce6bf244-kube-api-access-vs2xx\") pod \"dnsmasq-dns-5449989c59-vnpx6\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:05 crc kubenswrapper[4652]: I1205 05:42:05.932036 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5449989c59-vnpx6"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.054477 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b9b4959cc-6sfnn"]
Dec 05 05:42:06 crc kubenswrapper[4652]: W1205 05:42:06.059586 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc2335c5_9634_47bf_ae9d_b0cd9454d3b1.slice/crio-3da92ca10c3898eeab69f240afa8ebf58c7ec85ac641d96bc223aebcd092dcc2 WatchSource:0}: Error finding container 3da92ca10c3898eeab69f240afa8ebf58c7ec85ac641d96bc223aebcd092dcc2: Status 404 returned error can't find the container with id 3da92ca10c3898eeab69f240afa8ebf58c7ec85ac641d96bc223aebcd092dcc2
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.140980 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-lvk6d"]
Dec 05 05:42:06 crc kubenswrapper[4652]: W1205 05:42:06.152226 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f3358cf_54b3_4daa_a48c_6c3dd961ceb0.slice/crio-5ec942a02118d87548f33137e353b5ca0e98c6cd07cf373b219026f456b15851 WatchSource:0}: Error finding container 5ec942a02118d87548f33137e353b5ca0e98c6cd07cf373b219026f456b15851: Status 404 returned error can't find the container with id 5ec942a02118d87548f33137e353b5ca0e98c6cd07cf373b219026f456b15851
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.230442 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.231584 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.235851 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.236883 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.236989 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.237288 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.237410 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.237457 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.237513 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.237658 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-kjc7q"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.325180 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-config-data\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.325219 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.325240 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4aa077b9-9612-44cf-b163-d0c1f9468787-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.325618 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.325734 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.325815 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.325920 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4aa077b9-9612-44cf-b163-d0c1f9468787-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.326037 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqkz8\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-kube-api-access-wqkz8\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.326067 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.326100 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.326165 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.344198 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-vnpx6"]
Dec 05 05:42:06 crc kubenswrapper[4652]: W1205 05:42:06.355083 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd89b380c_d2c9_431f_8034_1002ce6bf244.slice/crio-5ae1061e8e65959194fd8c1792ca842cd6c02b1b4c85157e567a87bb4f217eb1 WatchSource:0}: Error finding container 5ae1061e8e65959194fd8c1792ca842cd6c02b1b4c85157e567a87bb4f217eb1: Status 404 returned error can't find the container with id 5ae1061e8e65959194fd8c1792ca842cd6c02b1b4c85157e567a87bb4f217eb1
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427577 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-config-data\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427618 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427639 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4aa077b9-9612-44cf-b163-d0c1f9468787-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427657 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427682 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427705 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427740 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4aa077b9-9612-44cf-b163-d0c1f9468787-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427796 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqkz8\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-kube-api-access-wqkz8\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427815 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427833 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.427866 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.428321 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.428431 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-config-data\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.429626 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.430316 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.432640 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn" event={"ID":"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1","Type":"ContainerStarted","Data":"3da92ca10c3898eeab69f240afa8ebf58c7ec85ac641d96bc223aebcd092dcc2"}
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.433019 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.433570 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.434571 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449989c59-vnpx6" event={"ID":"d89b380c-d2c9-431f-8034-1002ce6bf244","Type":"ContainerStarted","Data":"5ae1061e8e65959194fd8c1792ca842cd6c02b1b4c85157e567a87bb4f217eb1"}
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.435094 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.436432 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.438812 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4aa077b9-9612-44cf-b163-d0c1f9468787-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.439349 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d" event={"ID":"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0","Type":"ContainerStarted","Data":"5ec942a02118d87548f33137e353b5ca0e98c6cd07cf373b219026f456b15851"}
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.443860 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4aa077b9-9612-44cf-b163-d0c1f9468787-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.453709 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.459518 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqkz8\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-kube-api-access-wqkz8\") pod \"rabbitmq-server-0\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") " pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.474064 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.475888 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.484046 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.484247 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-twzml"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.484244 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.484338 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.484430 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.484649 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.484153 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.525618 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.528779 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.528944 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529141 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529176 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529225 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsbjj\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-kube-api-access-fsbjj\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529256 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529277 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529294 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529333 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529361 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.529390 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.555017 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630499 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630543 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630582 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsbjj\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-kube-api-access-fsbjj\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630609 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630632 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630651 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630691 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630724 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630766 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630827
4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.630865 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.631043 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.632185 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.632303 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.632337 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.632365 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.633189 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.635417 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.635879 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-confd\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.648613 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.649228 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.682029 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.690216 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsbjj\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-kube-api-access-fsbjj\") pod \"rabbitmq-cell1-server-0\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.727851 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.729742 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.733884 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-plugins-conf" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.733963 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-default-user" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.734043 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-erlang-cookie" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.734121 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-server-conf" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.734231 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-config-data" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.734368 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-notifications-svc" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.735279 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-server-dockercfg-sjq4m" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.753335 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.809101 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.834950 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.835829 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbh4t\" (UniqueName: \"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-kube-api-access-hbh4t\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.835860 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.835876 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.835902 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/86f82531-5219-4cd8-9432-1e8dc2a73b08-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.835922 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.835941 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.835967 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.835981 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc 
kubenswrapper[4652]: I1205 05:42:06.835994 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.836029 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.836075 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/86f82531-5219-4cd8-9432-1e8dc2a73b08-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: W1205 05:42:06.878768 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice/crio-4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f WatchSource:0}: Error finding container 4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f: Status 404 returned error can't find the container with id 4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938284 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/86f82531-5219-4cd8-9432-1e8dc2a73b08-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938337 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938371 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938417 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938437 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938456 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938510 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938590 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/86f82531-5219-4cd8-9432-1e8dc2a73b08-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938678 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbh4t\" (UniqueName: \"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-kube-api-access-hbh4t\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938683 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938701 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.938723 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.939937 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.941297 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.941400 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.941590 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.941679 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/86f82531-5219-4cd8-9432-1e8dc2a73b08-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.942057 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/86f82531-5219-4cd8-9432-1e8dc2a73b08-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.942960 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/86f82531-5219-4cd8-9432-1e8dc2a73b08-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.943296 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.943447 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.953520 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbh4t\" (UniqueName: \"kubernetes.io/projected/86f82531-5219-4cd8-9432-1e8dc2a73b08-kube-api-access-hbh4t\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:06 crc kubenswrapper[4652]: I1205 05:42:06.964417 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"86f82531-5219-4cd8-9432-1e8dc2a73b08\") " pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:07 crc kubenswrapper[4652]: I1205 05:42:07.070143 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:42:07 crc kubenswrapper[4652]: I1205 05:42:07.284593 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 05:42:07 crc kubenswrapper[4652]: W1205 05:42:07.288651 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f7bc86b_3d63_41b5_b116_5b638d1c9f7f.slice/crio-3a3d26c0302ddf0c697b0e8e287faa59e64929d49ebdce99e712c2ea3b10921f WatchSource:0}: Error finding container 3a3d26c0302ddf0c697b0e8e287faa59e64929d49ebdce99e712c2ea3b10921f: Status 404 returned error can't find the container with id 3a3d26c0302ddf0c697b0e8e287faa59e64929d49ebdce99e712c2ea3b10921f Dec 05 05:42:07 crc kubenswrapper[4652]: I1205 05:42:07.458906 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4aa077b9-9612-44cf-b163-d0c1f9468787","Type":"ContainerStarted","Data":"4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f"} Dec 05 05:42:07 crc kubenswrapper[4652]: I1205 05:42:07.460761 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f","Type":"ContainerStarted","Data":"3a3d26c0302ddf0c697b0e8e287faa59e64929d49ebdce99e712c2ea3b10921f"} Dec 05 05:42:07 crc kubenswrapper[4652]: I1205 05:42:07.490050 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.474158 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"86f82531-5219-4cd8-9432-1e8dc2a73b08","Type":"ContainerStarted","Data":"5f4ffdee9a99ac5864ac996b137a86e8871c791a04eb500be6092c12e9922cc1"} Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.880520 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.883166 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.885105 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.885362 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.885492 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-xbhl4" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.885664 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.888219 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.890783 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.994456 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.994499 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.994521 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.994549 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-497cb\" (UniqueName: \"kubernetes.io/projected/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-kube-api-access-497cb\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.994633 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-config-data-default\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.994664 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.994720 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:08 crc kubenswrapper[4652]: I1205 05:42:08.994828 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-kolla-config\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.096438 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.096499 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-497cb\" (UniqueName: \"kubernetes.io/projected/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-kube-api-access-497cb\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.096542 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-config-data-default\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.096574 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.098896 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.098947 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-kolla-config\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.098987 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.099068 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: 
\"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.099462 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.107126 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.107421 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-config-data-default\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.107894 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-kolla-config\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.115775 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.118310 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-497cb\" (UniqueName: \"kubernetes.io/projected/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-kube-api-access-497cb\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.127201 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.134093 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d16da2b5-fe11-4ded-9722-94f4ddb2c8e1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.140711 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1\") " pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.206380 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 05:42:09 crc kubenswrapper[4652]: I1205 05:42:09.666923 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 05:42:09 crc kubenswrapper[4652]: W1205 05:42:09.727521 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd16da2b5_fe11_4ded_9722_94f4ddb2c8e1.slice/crio-cc16b30cdaa939691a9e9dc47f636375cd561d2d5a23f97477efa2c4300dc2f6 WatchSource:0}: Error finding container cc16b30cdaa939691a9e9dc47f636375cd561d2d5a23f97477efa2c4300dc2f6: Status 404 returned error can't find the container with id cc16b30cdaa939691a9e9dc47f636375cd561d2d5a23f97477efa2c4300dc2f6 Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.181276 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.185008 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.187857 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.188687 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-s5drk" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.189487 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.190261 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.196529 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.315548 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.315607 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.315633 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.315659 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " 
pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.315679 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.315717 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkst5\" (UniqueName: \"kubernetes.io/projected/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-kube-api-access-kkst5\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.315774 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.315797 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.417750 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkst5\" (UniqueName: \"kubernetes.io/projected/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-kube-api-access-kkst5\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.417827 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.417863 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.417921 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.420311 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 
05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.420336 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.420364 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.420385 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.419719 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.418193 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.420745 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.421817 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.422285 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.425503 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.430339 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.445012 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkst5\" (UniqueName: \"kubernetes.io/projected/b12802a1-fea7-4427-9a10-7c6b2ac6c5bc-kube-api-access-kkst5\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.445935 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.495042 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1","Type":"ContainerStarted","Data":"cc16b30cdaa939691a9e9dc47f636375cd561d2d5a23f97477efa2c4300dc2f6"} Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.513604 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.542050 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.542969 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.546922 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.547133 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-j5pnz" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.549055 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.559599 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.623118 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38d81e02-71a7-4093-b84c-135254187f85-config-data\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.623179 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gb9g\" (UniqueName: \"kubernetes.io/projected/38d81e02-71a7-4093-b84c-135254187f85-kube-api-access-5gb9g\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.623218 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/38d81e02-71a7-4093-b84c-135254187f85-memcached-tls-certs\") pod \"memcached-0\" (UID: 
\"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.623263 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38d81e02-71a7-4093-b84c-135254187f85-combined-ca-bundle\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.623366 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/38d81e02-71a7-4093-b84c-135254187f85-kolla-config\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.724183 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38d81e02-71a7-4093-b84c-135254187f85-config-data\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.724235 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gb9g\" (UniqueName: \"kubernetes.io/projected/38d81e02-71a7-4093-b84c-135254187f85-kube-api-access-5gb9g\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.724264 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/38d81e02-71a7-4093-b84c-135254187f85-memcached-tls-certs\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.724292 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38d81e02-71a7-4093-b84c-135254187f85-combined-ca-bundle\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.724333 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/38d81e02-71a7-4093-b84c-135254187f85-kolla-config\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.725347 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/38d81e02-71a7-4093-b84c-135254187f85-kolla-config\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.726146 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/38d81e02-71a7-4093-b84c-135254187f85-config-data\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.728371 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38d81e02-71a7-4093-b84c-135254187f85-combined-ca-bundle\") pod 
\"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.728720 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/38d81e02-71a7-4093-b84c-135254187f85-memcached-tls-certs\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.736983 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gb9g\" (UniqueName: \"kubernetes.io/projected/38d81e02-71a7-4093-b84c-135254187f85-kube-api-access-5gb9g\") pod \"memcached-0\" (UID: \"38d81e02-71a7-4093-b84c-135254187f85\") " pod="openstack/memcached-0" Dec 05 05:42:10 crc kubenswrapper[4652]: I1205 05:42:10.888447 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 05:42:11 crc kubenswrapper[4652]: I1205 05:42:11.051807 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 05:42:11 crc kubenswrapper[4652]: I1205 05:42:11.361823 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 05:42:11 crc kubenswrapper[4652]: I1205 05:42:11.514331 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"38d81e02-71a7-4093-b84c-135254187f85","Type":"ContainerStarted","Data":"0150e3f9107f34d82498652c59ee50d45552f6ab209dbc477d4ef83d12cc8b47"} Dec 05 05:42:11 crc kubenswrapper[4652]: I1205 05:42:11.515723 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc","Type":"ContainerStarted","Data":"b0e2bb6d0aace3920a12327776f6d76f93ec05e14f4c06641d36f682f7a2e4b9"} Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.595774 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.597177 4652 util.go:30] "No sandbox for pod can be found. 
Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.597177 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.599847 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-sj9vz"
Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.606966 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.680189 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmzvx\" (UniqueName: \"kubernetes.io/projected/d88dcba1-b955-4ada-b5ae-c98555a02260-kube-api-access-vmzvx\") pod \"kube-state-metrics-0\" (UID: \"d88dcba1-b955-4ada-b5ae-c98555a02260\") " pod="openstack/kube-state-metrics-0"
Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.784439 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmzvx\" (UniqueName: \"kubernetes.io/projected/d88dcba1-b955-4ada-b5ae-c98555a02260-kube-api-access-vmzvx\") pod \"kube-state-metrics-0\" (UID: \"d88dcba1-b955-4ada-b5ae-c98555a02260\") " pod="openstack/kube-state-metrics-0"
Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.801302 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmzvx\" (UniqueName: \"kubernetes.io/projected/d88dcba1-b955-4ada-b5ae-c98555a02260-kube-api-access-vmzvx\") pod \"kube-state-metrics-0\" (UID: \"d88dcba1-b955-4ada-b5ae-c98555a02260\") " pod="openstack/kube-state-metrics-0"
Dec 05 05:42:12 crc kubenswrapper[4652]: I1205 05:42:12.931955 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.377865 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.530924 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d88dcba1-b955-4ada-b5ae-c98555a02260","Type":"ContainerStarted","Data":"cb2edbf86e11ca5583760eddbb81ebf39674fa25b097c9da7ce58cab3c574abe"}
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.822138 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.823980 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.826955 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.827230 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.827611 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.827935 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.828032 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-bj7vd"
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.835155 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 05:42:13 crc kubenswrapper[4652]: I1205 05:42:13.848595 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.018006 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.018049 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.018121 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.018824 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.018865 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6pxg\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-kube-api-access-n6pxg\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.018946 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.018982 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.019004 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.120591 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.120633 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6pxg\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-kube-api-access-n6pxg\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.120682 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.120725 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.120769 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.123697 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.123946 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.123995 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.125477 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.126681 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.128091 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.130026 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.130195 4652 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
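The csi_attacher.go:380 line records the kubelet skipping the device-staging step for the prometheus PVC because the kubevirt.io.hostpath-provisioner CSI driver does not advertise the STAGE_UNSTAGE_VOLUME node capability; MountDevice then completes as a no-op (the "MountVolume.MountDevice succeeded" entry that follows still reports the computed globalmount path) and only the per-pod SetUp does real work. An illustrative sketch of that branch (Python; this mirrors the logged behavior, not the kubelet's actual Go code in pkg/volume/csi/csi_attacher.go):

def mount_device(driver_caps, volume):
    # Matches "attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set.
    # Skipping MountDevice..." in the log: no NodeStageVolume call is made.
    if "STAGE_UNSTAGE_VOLUME" not in driver_caps:
        return f"skip NodeStageVolume for {volume} (treated as success)"
    # Drivers that do stage get a once-per-node mount under .../globalmount,
    # which each pod's SetUp then bind-mounts into its own volume path.
    return f"NodeStageVolume({volume}) -> <plugin dir>/globalmount"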
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.130224 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0495d50add23caafd61e72bca8d5e7274e8f1a3737d1b608160f79f869a86c50/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.143797 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.144434 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.153591 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6pxg\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-kube-api-access-n6pxg\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.178222 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:14 crc kubenswrapper[4652]: I1205 05:42:14.448776 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 05:42:15 crc kubenswrapper[4652]: I1205 05:42:15.337946 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 05:42:15 crc kubenswrapper[4652]: W1205 05:42:15.779276 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfd617e6_2edb_4ed2_9b66_fd8893ae9427.slice/crio-5162d422e5479ab753bd4a69cdb9f1365355e355d1ef413f98fc496785ffe3e2 WatchSource:0}: Error finding container 5162d422e5479ab753bd4a69cdb9f1365355e355d1ef413f98fc496785ffe3e2: Status 404 returned error can't find the container with id 5162d422e5479ab753bd4a69cdb9f1365355e355d1ef413f98fc496785ffe3e2
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.155169 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nb2cx"]
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.159118 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nb2cx"]
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.159286 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.270106 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-catalog-content\") pod \"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.270165 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jgt7\" (UniqueName: \"kubernetes.io/projected/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-kube-api-access-5jgt7\") pod \"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.270248 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-utilities\") pod \"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.372386 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-catalog-content\") pod \"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.372454 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jgt7\" (UniqueName: \"kubernetes.io/projected/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-kube-api-access-5jgt7\") pod \"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.372509 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-utilities\") pod \"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.372971 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-utilities\") pod \"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.373175 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-catalog-content\") pod \"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx"
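The W1205 manager.go:1169 warning above (cAdvisor "Failed to process watch event ... Status 404 ... can't find the container") is a race on container creation: the cgroup watch fires before the new crio container is queryable. The same id 5162d422e5479ab... shows up moments later in a ContainerStarted PLEG event for prometheus-metric-storage-0, so in this log the warning is transient. A small sketch (Python; pattern assumed from the lines above) to collect these ids so they can be cross-checked against later ContainerStarted events:

import re

# Container ids from the cAdvisor 404 watch warnings seen in this log.
WATCH_404 = re.compile(r"Failed to process watch event.*can't find the container with id ([0-9a-f]{64})")

def transient_watch_404s(lines):
    return [m.group(1) for line in lines if (m := WATCH_404.search(line))]

Any id returned here that also appears in a later "ContainerStarted" event can be treated as startup noise rather than a lost container.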
\"redhat-marketplace-nb2cx\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") " pod="openshift-marketplace/redhat-marketplace-nb2cx" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.485332 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nb2cx" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.573853 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerStarted","Data":"5162d422e5479ab753bd4a69cdb9f1365355e355d1ef413f98fc496785ffe3e2"} Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.575783 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d88dcba1-b955-4ada-b5ae-c98555a02260","Type":"ContainerStarted","Data":"a517707eef52d14b8a88bb8fd7338af132eed0cb2f4cc6e50683d2f58cbb2713"} Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.575933 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.585825 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9ddrq"] Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.586845 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.593035 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9ddrq"] Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.598287 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.598517 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.598675 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-4kwgt" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.604630 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-kpg54"] Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.606565 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.620141 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-kpg54"] Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.623377 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.193127342 podStartE2EDuration="4.623361999s" podCreationTimestamp="2025-12-05 05:42:12 +0000 UTC" firstStartedPulling="2025-12-05 05:42:13.379799345 +0000 UTC m=+935.616529611" lastFinishedPulling="2025-12-05 05:42:15.810034011 +0000 UTC m=+938.046764268" observedRunningTime="2025-12-05 05:42:16.59396306 +0000 UTC m=+938.830693327" watchObservedRunningTime="2025-12-05 05:42:16.623361999 +0000 UTC m=+938.860092266" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779389 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc55c237-0d7b-419f-9b97-966d6b918bda-ovn-controller-tls-certs\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779422 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc55c237-0d7b-419f-9b97-966d6b918bda-combined-ca-bundle\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779462 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-run\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779535 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc55c237-0d7b-419f-9b97-966d6b918bda-scripts\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779592 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w8jm\" (UniqueName: \"kubernetes.io/projected/157a2e37-1f93-4c7b-817c-ac64edce5a2f-kube-api-access-9w8jm\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779610 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-run-ovn\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779668 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-etc-ovs\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " 
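The pod_startup_latency_tracker.go:104 entry above carries the kubelet's startup SLI for kube-state-metrics-0: podStartE2EDuration is measured from podCreationTimestamp to watchObservedRunningTime, and podStartSLOduration is that same span minus the image-pull window (firstStartedPulling to lastFinishedPulling). A sketch that recomputes the logged numbers from the quoted fields (Python; the "+0000 UTC m=+..." suffix handling is simplified for this log's format, and nanoseconds are truncated to microseconds):

from datetime import datetime

def parse(s):  # e.g. "2025-12-05 05:42:13.379799345 +0000 UTC m=+935.616529611"
    stamp = " ".join(s.split()[:2])[:26]  # keep date+time, trim ns -> us
    fmt = "%Y-%m-%d %H:%M:%S.%f" if "." in stamp else "%Y-%m-%d %H:%M:%S"
    return datetime.strptime(stamp, fmt)

created  = parse("2025-12-05 05:42:12 +0000 UTC")
pull_beg = parse("2025-12-05 05:42:13.379799345 +0000 UTC m=+935.616529611")
pull_end = parse("2025-12-05 05:42:15.810034011 +0000 UTC m=+938.046764268")
watched  = parse("2025-12-05 05:42:16.623361999 +0000 UTC m=+938.860092266")

e2e  = (watched - created).total_seconds()   # ~4.623s (podStartE2EDuration)
pull = (pull_end - pull_beg).total_seconds() # ~2.430s spent pulling images
print(f"e2e~{e2e:.6f}s pull~{pull:.6f}s slo~{e2e - pull:.6f}s")  # slo~2.193s

So of the ~4.62 s end-to-end startup, roughly 2.43 s was image pull, leaving the ~2.19 s podStartSLOduration the log reports.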
pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779693 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-lib\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779768 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-log-ovn\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779806 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-run\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779821 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-log\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779878 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwb89\" (UniqueName: \"kubernetes.io/projected/bc55c237-0d7b-419f-9b97-966d6b918bda-kube-api-access-rwb89\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.779896 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/157a2e37-1f93-4c7b-817c-ac64edce5a2f-scripts\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.882658 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwb89\" (UniqueName: \"kubernetes.io/projected/bc55c237-0d7b-419f-9b97-966d6b918bda-kube-api-access-rwb89\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.882701 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/157a2e37-1f93-4c7b-817c-ac64edce5a2f-scripts\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.882866 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc55c237-0d7b-419f-9b97-966d6b918bda-ovn-controller-tls-certs\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc 
kubenswrapper[4652]: I1205 05:42:16.882901 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc55c237-0d7b-419f-9b97-966d6b918bda-combined-ca-bundle\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.882920 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-run\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.882978 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc55c237-0d7b-419f-9b97-966d6b918bda-scripts\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.882998 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w8jm\" (UniqueName: \"kubernetes.io/projected/157a2e37-1f93-4c7b-817c-ac64edce5a2f-kube-api-access-9w8jm\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.883045 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-run-ovn\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.883104 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-etc-ovs\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.883153 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-lib\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.883196 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-log-ovn\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.883294 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-run\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.883311 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-log\") pod 
\"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.884300 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-log\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.885924 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-etc-ovs\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.886816 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-log-ovn\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.887616 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/157a2e37-1f93-4c7b-817c-ac64edce5a2f-scripts\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.887695 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-run-ovn\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.889246 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc55c237-0d7b-419f-9b97-966d6b918bda-combined-ca-bundle\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.889775 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bc55c237-0d7b-419f-9b97-966d6b918bda-var-run\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.889899 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc55c237-0d7b-419f-9b97-966d6b918bda-ovn-controller-tls-certs\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.891877 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc55c237-0d7b-419f-9b97-966d6b918bda-scripts\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.906839 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w8jm\" (UniqueName: 
\"kubernetes.io/projected/157a2e37-1f93-4c7b-817c-ac64edce5a2f-kube-api-access-9w8jm\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.906871 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwb89\" (UniqueName: \"kubernetes.io/projected/bc55c237-0d7b-419f-9b97-966d6b918bda-kube-api-access-rwb89\") pod \"ovn-controller-9ddrq\" (UID: \"bc55c237-0d7b-419f-9b97-966d6b918bda\") " pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.911160 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-lib\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.911469 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/157a2e37-1f93-4c7b-817c-ac64edce5a2f-var-run\") pod \"ovn-controller-ovs-kpg54\" (UID: \"157a2e37-1f93-4c7b-817c-ac64edce5a2f\") " pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.913804 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9ddrq" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.934434 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.990521 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.992942 4652 util.go:30] "No sandbox for pod can be found. 
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.992942 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.996717 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.996935 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.997221 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.997393 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.997491 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-6s2d4"
Dec 05 05:42:16 crc kubenswrapper[4652]: I1205 05:42:16.998368 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.088315 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.088368 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-config\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.088436 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.088507 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.088528 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.088545 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2q4w\" (UniqueName: \"kubernetes.io/projected/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-kube-api-access-b2q4w\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.088594 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.088632 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.189630 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.189663 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.189685 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2q4w\" (UniqueName: \"kubernetes.io/projected/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-kube-api-access-b2q4w\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.189704 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.189760 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.189843 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.189862 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-config\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.189917 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.190781 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.193177 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-config\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.194079 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.194408 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.196111 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.197936 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.199232 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.210100 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2q4w\" (UniqueName: \"kubernetes.io/projected/c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6-kube-api-access-b2q4w\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.237956 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.249249 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nb2cx"]
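For the local PV backing ovsdbserver-nb-0, MountVolume.MountDevice records the node-level device mount path (/mnt/openstack/pv07, the pre-provisioned local directory), and the subsequent SetUp makes that path visible at the pod's own volume directory. A sketch that pulls the volume-to-device-path pairs out of such entries (Python; regex assumed from the two MountDevice lines in this log):

import re

DEV = re.compile(
    r'"MountVolume\.MountDevice succeeded for volume \\"([^\\]+)\\".*?'
    r'device mount path \\"([^\\]+)\\"')

def device_paths(lines):
    # volume name -> node-level device mount path
    return {m.group(1): m.group(2) for line in lines if (m := DEV.search(line))}

On the entries above this would yield local-storage07-crc mapped to /mnt/openstack/pv07, alongside the prometheus PVC mapped to its hostpath-provisioner globalmount directory.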
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.352885 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.430047 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9ddrq"]
Dec 05 05:42:17 crc kubenswrapper[4652]: W1205 05:42:17.445647 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc55c237_0d7b_419f_9b97_966d6b918bda.slice/crio-9b952ca1442f9e478b00371ed83f7df8d2737b8508aaac75521bd0ea0a6abb09 WatchSource:0}: Error finding container 9b952ca1442f9e478b00371ed83f7df8d2737b8508aaac75521bd0ea0a6abb09: Status 404 returned error can't find the container with id 9b952ca1442f9e478b00371ed83f7df8d2737b8508aaac75521bd0ea0a6abb09
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.599191 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9ddrq" event={"ID":"bc55c237-0d7b-419f-9b97-966d6b918bda","Type":"ContainerStarted","Data":"9b952ca1442f9e478b00371ed83f7df8d2737b8508aaac75521bd0ea0a6abb09"}
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.616235 4652 generic.go:334] "Generic (PLEG): container finished" podID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerID="20d6b73fce3d1f4656f76eaaf9a60295a718fd3b64d7686ece85defc0fca38a5" exitCode=0
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.617119 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nb2cx" event={"ID":"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3","Type":"ContainerDied","Data":"20d6b73fce3d1f4656f76eaaf9a60295a718fd3b64d7686ece85defc0fca38a5"}
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.617159 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nb2cx" event={"ID":"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3","Type":"ContainerStarted","Data":"3461289872d18e3ffe31d71e7cd085b44d3e06a2645e7b9561ad1ac0904d7546"}
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.623710 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-kpg54"]
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.877595 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.901437 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-xthfp"]
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.904824 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.918202 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 05 05:42:17 crc kubenswrapper[4652]: I1205 05:42:17.929364 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-xthfp"]
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.005845 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2a9269-04a5-4673-a350-9c491689231c-config\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.005918 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2a9269-04a5-4673-a350-9c491689231c-combined-ca-bundle\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.005955 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5e2a9269-04a5-4673-a350-9c491689231c-ovs-rundir\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.005975 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2a9269-04a5-4673-a350-9c491689231c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.005989 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzfz8\" (UniqueName: \"kubernetes.io/projected/5e2a9269-04a5-4673-a350-9c491689231c-kube-api-access-fzfz8\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.006013 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5e2a9269-04a5-4673-a350-9c491689231c-ovn-rundir\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.012515 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-vnpx6"]
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.020755 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fb75c485f-vkkdf"]
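The back-to-back SyncLoop DELETE of dnsmasq-dns-5449989c59-vnpx6 and ADD of dnsmasq-dns-6fb75c485f-vkkdf looks like a rolling replacement: the ReplicaSet hash embedded in the pod name changes, and the new pod mounts an ovsdbserver-nb ConfigMap volume (seen just below), consistent with the DNS config being regenerated once ovsdbserver-nb-0 exists. A sketch to spot such rollouts in the log (Python; the name pattern <deployment>-<hash>-<suffix> is an assumption about how these pods are named):

import re

POD = re.compile(r'pods=\["openstack/(dnsmasq-dns)-([0-9a-f]+)-[a-z0-9]+"\]')

def rollout_events(lines):
    for line in lines:
        if ("SyncLoop DELETE" in line or "SyncLoop ADD" in line) and (m := POD.search(line)):
            yield ("DELETE" if "SyncLoop DELETE" in line else "ADD"), m.group(1), m.group(2)

# On the lines above: ('DELETE', 'dnsmasq-dns', '5449989c59') then
# ('ADD', 'dnsmasq-dns', '6fb75c485f') -- a new ReplicaSet replacing the old.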
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.028715 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.032917 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.035483 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fb75c485f-vkkdf"]
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.107186 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tcxm\" (UniqueName: \"kubernetes.io/projected/1221691a-22b3-4c0b-8b69-61f5103e837a-kube-api-access-6tcxm\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.107257 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-config\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.107357 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2a9269-04a5-4673-a350-9c491689231c-config\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.108486 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2a9269-04a5-4673-a350-9c491689231c-config\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.108545 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.108660 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2a9269-04a5-4673-a350-9c491689231c-combined-ca-bundle\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.108713 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-dns-svc\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.108746 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5e2a9269-04a5-4673-a350-9c491689231c-ovs-rundir\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.108946 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5e2a9269-04a5-4673-a350-9c491689231c-ovs-rundir\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.108771 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2a9269-04a5-4673-a350-9c491689231c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.108998 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzfz8\" (UniqueName: \"kubernetes.io/projected/5e2a9269-04a5-4673-a350-9c491689231c-kube-api-access-fzfz8\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.109038 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5e2a9269-04a5-4673-a350-9c491689231c-ovn-rundir\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.109148 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5e2a9269-04a5-4673-a350-9c491689231c-ovn-rundir\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.113817 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2a9269-04a5-4673-a350-9c491689231c-combined-ca-bundle\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.113858 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e2a9269-04a5-4673-a350-9c491689231c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.122986 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzfz8\" (UniqueName: \"kubernetes.io/projected/5e2a9269-04a5-4673-a350-9c491689231c-kube-api-access-fzfz8\") pod \"ovn-controller-metrics-xthfp\" (UID: \"5e2a9269-04a5-4673-a350-9c491689231c\") " pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.210619 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-dns-svc\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.210791 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tcxm\" (UniqueName: \"kubernetes.io/projected/1221691a-22b3-4c0b-8b69-61f5103e837a-kube-api-access-6tcxm\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.210839 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-config\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.210873 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.211818 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-dns-svc\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.212403 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.213867 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-config\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.238335 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tcxm\" (UniqueName: \"kubernetes.io/projected/1221691a-22b3-4c0b-8b69-61f5103e837a-kube-api-access-6tcxm\") pod \"dnsmasq-dns-6fb75c485f-vkkdf\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.240506 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-xthfp"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.350242 4652 util.go:30] "No sandbox for pod can be found.
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.350242 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf"
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.655964 4652 generic.go:334] "Generic (PLEG): container finished" podID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerID="114c0e24af28a1c069ad9150036428555c32964e4c3cfc3dc32c1005a1dc8c33" exitCode=0
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.656044 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nb2cx" event={"ID":"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3","Type":"ContainerDied","Data":"114c0e24af28a1c069ad9150036428555c32964e4c3cfc3dc32c1005a1dc8c33"}
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.662674 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kpg54" event={"ID":"157a2e37-1f93-4c7b-817c-ac64edce5a2f","Type":"ContainerStarted","Data":"f83ebeb29f1096af3c7f4fda14c6b3a22542bf1c4774c94c121649912550634c"}
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.664533 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6","Type":"ContainerStarted","Data":"c90c05b4acba250ea98ca0352e4a00eb89fc3ec50292b89c9277912932be0f1f"}
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.717674 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-xthfp"]
Dec 05 05:42:18 crc kubenswrapper[4652]: I1205 05:42:18.902329 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fb75c485f-vkkdf"]
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.630401 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.640487 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.646364 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.646541 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.646769 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-nwll4"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.648496 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.665044 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.684210 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-xthfp" event={"ID":"5e2a9269-04a5-4673-a350-9c491689231c","Type":"ContainerStarted","Data":"bd76528710aaf36f98c71441349875eb7a95726e2faa36687dd0098be687afe6"}
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.686908 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" event={"ID":"1221691a-22b3-4c0b-8b69-61f5103e837a","Type":"ContainerStarted","Data":"2cc002b8b4bf1736374b51051adbc547962385ae4cfc8c53ca9abe8c19f305e8"}
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.762975 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.763041 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ec8106-75f2-456c-91ae-bf0f71304e9b-config\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.763076 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.763096 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.763254 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79ec8106-75f2-456c-91ae-bf0f71304e9b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.763285 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.763572 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sgwh\" (UniqueName: \"kubernetes.io/projected/79ec8106-75f2-456c-91ae-bf0f71304e9b-kube-api-access-7sgwh\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.763713 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79ec8106-75f2-456c-91ae-bf0f71304e9b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.865019 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79ec8106-75f2-456c-91ae-bf0f71304e9b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.865084 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.865135 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ec8106-75f2-456c-91ae-bf0f71304e9b-config\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.865161 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.865682 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.865946 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ec8106-75f2-456c-91ae-bf0f71304e9b-config\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.866112 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/79ec8106-75f2-456c-91ae-bf0f71304e9b-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.865435 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.869961 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79ec8106-75f2-456c-91ae-bf0f71304e9b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.870062 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.870163 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sgwh\" (UniqueName: \"kubernetes.io/projected/79ec8106-75f2-456c-91ae-bf0f71304e9b-kube-api-access-7sgwh\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.870225 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/79ec8106-75f2-456c-91ae-bf0f71304e9b-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.870611 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.872549 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.884697 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ec8106-75f2-456c-91ae-bf0f71304e9b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.887125 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sgwh\" (UniqueName: \"kubernetes.io/projected/79ec8106-75f2-456c-91ae-bf0f71304e9b-kube-api-access-7sgwh\") pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0"
pod \"ovsdbserver-sb-0\" (UID: \"79ec8106-75f2-456c-91ae-bf0f71304e9b\") " pod="openstack/ovsdbserver-sb-0" Dec 05 05:42:19 crc kubenswrapper[4652]: I1205 05:42:19.986518 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 05:42:20 crc kubenswrapper[4652]: I1205 05:42:20.488503 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 05:42:20 crc kubenswrapper[4652]: W1205 05:42:20.501420 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79ec8106_75f2_456c_91ae_bf0f71304e9b.slice/crio-61aac26cd4030727d00317cf741078d8a35a29a058f14a664874bd17c8f00c34 WatchSource:0}: Error finding container 61aac26cd4030727d00317cf741078d8a35a29a058f14a664874bd17c8f00c34: Status 404 returned error can't find the container with id 61aac26cd4030727d00317cf741078d8a35a29a058f14a664874bd17c8f00c34 Dec 05 05:42:20 crc kubenswrapper[4652]: I1205 05:42:20.698647 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nb2cx" event={"ID":"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3","Type":"ContainerStarted","Data":"ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72"} Dec 05 05:42:20 crc kubenswrapper[4652]: I1205 05:42:20.703847 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"79ec8106-75f2-456c-91ae-bf0f71304e9b","Type":"ContainerStarted","Data":"61aac26cd4030727d00317cf741078d8a35a29a058f14a664874bd17c8f00c34"} Dec 05 05:42:20 crc kubenswrapper[4652]: I1205 05:42:20.719687 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nb2cx" podStartSLOduration=2.293178662 podStartE2EDuration="4.719674097s" podCreationTimestamp="2025-12-05 05:42:16 +0000 UTC" firstStartedPulling="2025-12-05 05:42:17.62092335 +0000 UTC m=+939.857653607" lastFinishedPulling="2025-12-05 05:42:20.047418776 +0000 UTC m=+942.284149042" observedRunningTime="2025-12-05 05:42:20.713026555 +0000 UTC m=+942.949756822" watchObservedRunningTime="2025-12-05 05:42:20.719674097 +0000 UTC m=+942.956404365" Dec 05 05:42:21 crc kubenswrapper[4652]: I1205 05:42:21.714639 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerStarted","Data":"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b"} Dec 05 05:42:22 crc kubenswrapper[4652]: I1205 05:42:22.937481 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.641851 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pl5m4"] Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.648283 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pl5m4"] Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.648376 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.749642 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-xthfp" event={"ID":"5e2a9269-04a5-4673-a350-9c491689231c","Type":"ContainerStarted","Data":"9aa9d604a5a3ea5ef7258d74385c55699c2851baf7db552e02bd990dd835fcad"} Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.761675 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-utilities\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.761798 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjbpx\" (UniqueName: \"kubernetes.io/projected/c07a13fb-04c0-41fb-b8f5-376ac6355f61-kube-api-access-gjbpx\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.761889 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-catalog-content\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.771765 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-xthfp" podStartSLOduration=3.639452792 podStartE2EDuration="7.771752689s" podCreationTimestamp="2025-12-05 05:42:17 +0000 UTC" firstStartedPulling="2025-12-05 05:42:19.585162503 +0000 UTC m=+941.821892771" lastFinishedPulling="2025-12-05 05:42:23.717462401 +0000 UTC m=+945.954192668" observedRunningTime="2025-12-05 05:42:24.76453232 +0000 UTC m=+947.001262588" watchObservedRunningTime="2025-12-05 05:42:24.771752689 +0000 UTC m=+947.008482946" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.865386 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjbpx\" (UniqueName: \"kubernetes.io/projected/c07a13fb-04c0-41fb-b8f5-376ac6355f61-kube-api-access-gjbpx\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.865487 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-catalog-content\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.865692 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-utilities\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.866071 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-catalog-content\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.866103 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-utilities\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.882545 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjbpx\" (UniqueName: \"kubernetes.io/projected/c07a13fb-04c0-41fb-b8f5-376ac6355f61-kube-api-access-gjbpx\") pod \"redhat-operators-pl5m4\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:24 crc kubenswrapper[4652]: I1205 05:42:24.980873 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.078481 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b9b4959cc-6sfnn"] Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.094438 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6dbf544cc9-zdrbc"] Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.095771 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.102830 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.108839 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dbf544cc9-zdrbc"] Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.173474 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-nb\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.173522 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-dns-svc\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.173583 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-sb\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.173697 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prhp7\" (UniqueName: \"kubernetes.io/projected/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-kube-api-access-prhp7\") pod 
\"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.173904 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-config\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.275825 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-config\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.275904 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-nb\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.275928 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-dns-svc\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.275963 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-sb\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.275993 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prhp7\" (UniqueName: \"kubernetes.io/projected/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-kube-api-access-prhp7\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.276925 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-config\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.278358 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-nb\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.279459 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-sb\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " 
pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.279625 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-dns-svc\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.294831 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prhp7\" (UniqueName: \"kubernetes.io/projected/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-kube-api-access-prhp7\") pod \"dnsmasq-dns-6dbf544cc9-zdrbc\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.431888 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.501010 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pl5m4"] Dec 05 05:42:25 crc kubenswrapper[4652]: W1205 05:42:25.523886 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc07a13fb_04c0_41fb_b8f5_376ac6355f61.slice/crio-c8ad7666d3e2fc961335f69e9226b00b716ee8985aec9343c3d4c108d584c535 WatchSource:0}: Error finding container c8ad7666d3e2fc961335f69e9226b00b716ee8985aec9343c3d4c108d584c535: Status 404 returned error can't find the container with id c8ad7666d3e2fc961335f69e9226b00b716ee8985aec9343c3d4c108d584c535 Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.765968 4652 generic.go:334] "Generic (PLEG): container finished" podID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerID="29b06fc044130ae30cfab04fe66f4cfb679ee1a7df75fb4f78e7664dd7bf427c" exitCode=0 Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.768148 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5m4" event={"ID":"c07a13fb-04c0-41fb-b8f5-376ac6355f61","Type":"ContainerDied","Data":"29b06fc044130ae30cfab04fe66f4cfb679ee1a7df75fb4f78e7664dd7bf427c"} Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.768173 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5m4" event={"ID":"c07a13fb-04c0-41fb-b8f5-376ac6355f61","Type":"ContainerStarted","Data":"c8ad7666d3e2fc961335f69e9226b00b716ee8985aec9343c3d4c108d584c535"} Dec 05 05:42:25 crc kubenswrapper[4652]: I1205 05:42:25.874338 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dbf544cc9-zdrbc"] Dec 05 05:42:26 crc kubenswrapper[4652]: I1205 05:42:26.486134 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nb2cx" Dec 05 05:42:26 crc kubenswrapper[4652]: I1205 05:42:26.486172 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nb2cx" Dec 05 05:42:26 crc kubenswrapper[4652]: I1205 05:42:26.543580 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nb2cx" Dec 05 05:42:26 crc kubenswrapper[4652]: I1205 05:42:26.774730 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5m4" 
event={"ID":"c07a13fb-04c0-41fb-b8f5-376ac6355f61","Type":"ContainerStarted","Data":"b9b397efca3da981d1e4e293a4c28e4f7e77f5c02f42ddf054d726ac4c129cc5"} Dec 05 05:42:26 crc kubenswrapper[4652]: I1205 05:42:26.776592 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" event={"ID":"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6","Type":"ContainerStarted","Data":"4c5e03058c62f4e12029ed22dab415ad8aa753e712ef7dc92a3838839b269d5d"} Dec 05 05:42:26 crc kubenswrapper[4652]: I1205 05:42:26.786260 4652 generic.go:334] "Generic (PLEG): container finished" podID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerID="4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b" exitCode=0 Dec 05 05:42:26 crc kubenswrapper[4652]: I1205 05:42:26.786452 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerDied","Data":"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b"} Dec 05 05:42:26 crc kubenswrapper[4652]: I1205 05:42:26.857159 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nb2cx" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.629658 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9rrkl"] Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.632567 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.644041 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9rrkl"] Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.731315 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-utilities\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.731387 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-catalog-content\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.731457 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmw7h\" (UniqueName: \"kubernetes.io/projected/af44672e-c103-43c5-a9ed-2d9812053191-kube-api-access-qmw7h\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.832982 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-utilities\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.833034 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-catalog-content\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.833137 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmw7h\" (UniqueName: \"kubernetes.io/projected/af44672e-c103-43c5-a9ed-2d9812053191-kube-api-access-qmw7h\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.833427 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-utilities\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.833987 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-catalog-content\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.859581 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmw7h\" (UniqueName: \"kubernetes.io/projected/af44672e-c103-43c5-a9ed-2d9812053191-kube-api-access-qmw7h\") pod \"community-operators-9rrkl\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:27 crc kubenswrapper[4652]: I1205 05:42:27.970088 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:28 crc kubenswrapper[4652]: I1205 05:42:28.812360 4652 generic.go:334] "Generic (PLEG): container finished" podID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerID="b9b397efca3da981d1e4e293a4c28e4f7e77f5c02f42ddf054d726ac4c129cc5" exitCode=0 Dec 05 05:42:28 crc kubenswrapper[4652]: I1205 05:42:28.812473 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5m4" event={"ID":"c07a13fb-04c0-41fb-b8f5-376ac6355f61","Type":"ContainerDied","Data":"b9b397efca3da981d1e4e293a4c28e4f7e77f5c02f42ddf054d726ac4c129cc5"} Dec 05 05:42:29 crc kubenswrapper[4652]: I1205 05:42:29.113384 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9rrkl"] Dec 05 05:42:29 crc kubenswrapper[4652]: W1205 05:42:29.122971 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf44672e_c103_43c5_a9ed_2d9812053191.slice/crio-306accec47e57c7d24e9c38a58ae8cbdc207f33b629081934d4fa9782be7899a WatchSource:0}: Error finding container 306accec47e57c7d24e9c38a58ae8cbdc207f33b629081934d4fa9782be7899a: Status 404 returned error can't find the container with id 306accec47e57c7d24e9c38a58ae8cbdc207f33b629081934d4fa9782be7899a Dec 05 05:42:29 crc kubenswrapper[4652]: I1205 05:42:29.825756 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rrkl" event={"ID":"af44672e-c103-43c5-a9ed-2d9812053191","Type":"ContainerStarted","Data":"306accec47e57c7d24e9c38a58ae8cbdc207f33b629081934d4fa9782be7899a"} Dec 05 05:42:30 crc kubenswrapper[4652]: I1205 05:42:30.018924 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nb2cx"] Dec 05 05:42:30 crc kubenswrapper[4652]: I1205 05:42:30.019130 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nb2cx" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="registry-server" containerID="cri-o://ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72" gracePeriod=2 Dec 05 05:42:30 crc kubenswrapper[4652]: I1205 05:42:30.843352 4652 generic.go:334] "Generic (PLEG): container finished" podID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerID="ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72" exitCode=0 Dec 05 05:42:30 crc kubenswrapper[4652]: I1205 05:42:30.843426 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nb2cx" event={"ID":"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3","Type":"ContainerDied","Data":"ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72"} Dec 05 05:42:36 crc kubenswrapper[4652]: E1205 05:42:36.486518 4652 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72 is running failed: container process not found" containerID="ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 05:42:36 crc kubenswrapper[4652]: E1205 05:42:36.487359 4652 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72 is running 
Dec 05 05:42:36 crc kubenswrapper[4652]: E1205 05:42:36.487359 4652 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72 is running failed: container process not found" containerID="ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 05:42:36 crc kubenswrapper[4652]: E1205 05:42:36.487769 4652 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72 is running failed: container process not found" containerID="ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 05:42:36 crc kubenswrapper[4652]: E1205 05:42:36.487819 4652 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-nb2cx" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="registry-server"
Dec 05 05:42:36 crc kubenswrapper[4652]: I1205 05:42:36.885798 4652 generic.go:334] "Generic (PLEG): container finished" podID="af44672e-c103-43c5-a9ed-2d9812053191" containerID="8f5b4178eea34e47adc3fc5f62f380e56bc3a11ce9178b01f74ab2a242d22b8c" exitCode=0
Dec 05 05:42:36 crc kubenswrapper[4652]: I1205 05:42:36.885843 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rrkl" event={"ID":"af44672e-c103-43c5-a9ed-2d9812053191","Type":"ContainerDied","Data":"8f5b4178eea34e47adc3fc5f62f380e56bc3a11ce9178b01f74ab2a242d22b8c"}
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.737379 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t95s6"]
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.744053 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.757946 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t95s6"]
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.846479 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-catalog-content\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.846652 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-utilities\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.846730 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dntt8\" (UniqueName: \"kubernetes.io/projected/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-kube-api-access-dntt8\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.948226 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-catalog-content\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.948396 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-utilities\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.948494 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dntt8\" (UniqueName: \"kubernetes.io/projected/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-kube-api-access-dntt8\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.949205 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-catalog-content\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.949427 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-utilities\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:38 crc kubenswrapper[4652]: I1205 05:42:38.969302 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dntt8\" (UniqueName: \"kubernetes.io/projected/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-kube-api-access-dntt8\") pod \"certified-operators-t95s6\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:39 crc kubenswrapper[4652]: I1205 05:42:39.069892 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.756659 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.777834 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-utilities\") pod \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") "
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.777880 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-catalog-content\") pod \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") "
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.777969 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jgt7\" (UniqueName: \"kubernetes.io/projected/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-kube-api-access-5jgt7\") pod \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\" (UID: \"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3\") "
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.779359 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-utilities" (OuterVolumeSpecName: "utilities") pod "5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" (UID: "5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.783058 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-kube-api-access-5jgt7" (OuterVolumeSpecName: "kube-api-access-5jgt7") pod "5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" (UID: "5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3"). InnerVolumeSpecName "kube-api-access-5jgt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.793258 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" (UID: "5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.879909 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.879937 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.879947 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jgt7\" (UniqueName: \"kubernetes.io/projected/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3-kube-api-access-5jgt7\") on node \"crc\" DevicePath \"\""
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.922133 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nb2cx" event={"ID":"5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3","Type":"ContainerDied","Data":"3461289872d18e3ffe31d71e7cd085b44d3e06a2645e7b9561ad1ac0904d7546"}
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.922187 4652 scope.go:117] "RemoveContainer" containerID="ea8c79ae5c332b9a72c30686e0fb0bdecefb42efba3665161f284e110ed3cb72"
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.922188 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nb2cx"
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.956154 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nb2cx"]
Dec 05 05:42:40 crc kubenswrapper[4652]: I1205 05:42:40.962139 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nb2cx"]
Dec 05 05:42:42 crc kubenswrapper[4652]: I1205 05:42:42.141407 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" path="/var/lib/kubelet/pods/5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3/volumes"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.230513 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.231322 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.231467 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hbh4t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-notifications-server-0_openstack(86f82531-5219-4cd8-9432-1e8dc2a73b08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.232542 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-notifications-server-0" podUID="86f82531-5219-4cd8-9432-1e8dc2a73b08"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.667583 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.667635 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.667772 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wqkz8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(4aa077b9-9612-44cf-b163-d0c1f9468787): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.669833 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.974838 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current\\\"\"" pod="openstack/rabbitmq-server-0" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787"
Dec 05 05:42:48 crc kubenswrapper[4652]: E1205 05:42:48.975940 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-rabbitmq:current\\\"\"" pod="openstack/rabbitmq-notifications-server-0" podUID="86f82531-5219-4cd8-9432-1e8dc2a73b08"
Dec 05 05:42:49 crc
kubenswrapper[4652]: I1205 05:42:49.622797 4652 scope.go:117] "RemoveContainer" containerID="114c0e24af28a1c069ad9150036428555c32964e4c3cfc3dc32c1005a1dc8c33" Dec 05 05:42:52 crc kubenswrapper[4652]: E1205 05:42:52.954649 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-base:current" Dec 05 05:42:52 crc kubenswrapper[4652]: E1205 05:42:52.954691 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-base:current" Dec 05 05:42:52 crc kubenswrapper[4652]: E1205 05:42:52.954799 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:ovsdb-server-init,Image:quay.rdoproject.org/podified-master-centos10/openstack-ovn-base:current,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n56dhb5h5dfhd9h547h66h5f9h78hf7h8fhf8h86h7h687h54h67fh5cdh58h5fchffh6ch646hcfh69h5d8h6ch8hfch658h5h77hd7q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9w8jm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-ovs-kpg54_openstack(157a2e37-1f93-4c7b-817c-ac64edce5a2f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:42:52 crc kubenswrapper[4652]: E1205 05:42:52.955960 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-ovs-kpg54" podUID="157a2e37-1f93-4c7b-817c-ac64edce5a2f" Dec 05 05:42:52 crc kubenswrapper[4652]: E1205 05:42:52.998742 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ovn-base:current\\\"\"" pod="openstack/ovn-controller-ovs-kpg54" podUID="157a2e37-1f93-4c7b-817c-ac64edce5a2f" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.106836 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-controller:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.107044 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-controller:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.107157 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.rdoproject.org/podified-master-centos10/openstack-ovn-controller:current,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key --ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n56dhb5h5dfhd9h547h66h5f9h78hf7h8fhf8h86h7h687h54h67fh5cdh58h5fchffh6ch646hcfh69h5d8h6ch8hfch658h5h77hd7q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rwb89,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,Pe
riodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-9ddrq_openstack(bc55c237-0d7b-419f-9b97-966d6b918bda): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.109026 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-9ddrq" podUID="bc55c237-0d7b-419f-9b97-966d6b918bda" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.550036 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.550084 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.550191 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qb9lx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-b9b4959cc-6sfnn_openstack(bc2335c5-9634-47bf-ae9d-b0cd9454d3b1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.551342 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn" podUID="bc2335c5-9634-47bf-ae9d-b0cd9454d3b1" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.759665 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-sb-db-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.759699 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ovn-sb-db-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.759816 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ovsdbserver-sb,Image:quay.rdoproject.org/podified-master-centos10/openstack-ovn-sb-db-server:current,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5ddh5f6h679h75h5f6h665h67h7ch656hfh65fhdch678h676hcdh7dh84hf8h58bh696h68ch5dch6dh58bh64h695h665h7fh7fh585h645h5b9q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7sgwh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ovsdbserver-sb-0_openstack(79ec8106-75f2-456c-91ae-bf0f71304e9b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:42:53 crc kubenswrapper[4652]: I1205 05:42:53.827956 4652 scope.go:117] "RemoveContainer" containerID="20d6b73fce3d1f4656f76eaaf9a60295a718fd3b64d7686ece85defc0fca38a5" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.878829 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.878868 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.878953 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fpfzj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-8468885bfc-9wk2l_openstack(66a52801-4112-449d-a366-cdd147e56ca5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.880150 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" podUID="66a52801-4112-449d-a366-cdd147e56ca5" Dec 05 05:42:53 crc kubenswrapper[4652]: 
E1205 05:42:53.905101 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.905144 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.905240 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mhmt9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-86b8f4ff9-lvk6d_openstack(1f3358cf-54b3-4daa-a48c-6c3dd961ceb0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.906524 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d" podUID="1f3358cf-54b3-4daa-a48c-6c3dd961ceb0" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.916398 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 
05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.916436 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.916519 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5c7h56dh5cfh8bh54fhbbhf4h5b9hdch67fhd7h55fh55fh6ch9h548h54ch665h647h6h8fhd6h5dfh5cdh58bh577h66fh695h5fbh55h77h5fcq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vs2xx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5449989c59-vnpx6_openstack(d89b380c-d2c9-431f-8034-1002ce6bf244): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.917977 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5449989c59-vnpx6" podUID="d89b380c-d2c9-431f-8034-1002ce6bf244" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.938699 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.938919 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.939000 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-master-centos10/openstack-neutron-server:current,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tvxjf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-545d49fd5c-t79sk_openstack(c13255f8-7735-451b-90d4-27cc4d0844b2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:42:53 crc kubenswrapper[4652]: E1205 05:42:53.940466 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" podUID="c13255f8-7735-451b-90d4-27cc4d0844b2" Dec 05 05:42:54 crc kubenswrapper[4652]: E1205 05:42:54.009295 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ovn-controller:current\\\"\"" pod="openstack/ovn-controller-9ddrq" podUID="bc55c237-0d7b-419f-9b97-966d6b918bda" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.257723 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t95s6"] Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.417876 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5449989c59-vnpx6" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.441828 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598040 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-dns-svc\") pod \"d89b380c-d2c9-431f-8034-1002ce6bf244\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598105 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs2xx\" (UniqueName: \"kubernetes.io/projected/d89b380c-d2c9-431f-8034-1002ce6bf244-kube-api-access-vs2xx\") pod \"d89b380c-d2c9-431f-8034-1002ce6bf244\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598186 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66a52801-4112-449d-a366-cdd147e56ca5-config\") pod \"66a52801-4112-449d-a366-cdd147e56ca5\" (UID: \"66a52801-4112-449d-a366-cdd147e56ca5\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598225 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-config\") pod \"d89b380c-d2c9-431f-8034-1002ce6bf244\" (UID: \"d89b380c-d2c9-431f-8034-1002ce6bf244\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598282 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpfzj\" (UniqueName: \"kubernetes.io/projected/66a52801-4112-449d-a366-cdd147e56ca5-kube-api-access-fpfzj\") pod \"66a52801-4112-449d-a366-cdd147e56ca5\" (UID: \"66a52801-4112-449d-a366-cdd147e56ca5\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598495 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d89b380c-d2c9-431f-8034-1002ce6bf244" (UID: "d89b380c-d2c9-431f-8034-1002ce6bf244"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598660 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a52801-4112-449d-a366-cdd147e56ca5-config" (OuterVolumeSpecName: "config") pod "66a52801-4112-449d-a366-cdd147e56ca5" (UID: "66a52801-4112-449d-a366-cdd147e56ca5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598685 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-config" (OuterVolumeSpecName: "config") pod "d89b380c-d2c9-431f-8034-1002ce6bf244" (UID: "d89b380c-d2c9-431f-8034-1002ce6bf244"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598911 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66a52801-4112-449d-a366-cdd147e56ca5-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598930 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.598938 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d89b380c-d2c9-431f-8034-1002ce6bf244-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.616129 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66a52801-4112-449d-a366-cdd147e56ca5-kube-api-access-fpfzj" (OuterVolumeSpecName: "kube-api-access-fpfzj") pod "66a52801-4112-449d-a366-cdd147e56ca5" (UID: "66a52801-4112-449d-a366-cdd147e56ca5"). InnerVolumeSpecName "kube-api-access-fpfzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.617387 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d89b380c-d2c9-431f-8034-1002ce6bf244-kube-api-access-vs2xx" (OuterVolumeSpecName: "kube-api-access-vs2xx") pod "d89b380c-d2c9-431f-8034-1002ce6bf244" (UID: "d89b380c-d2c9-431f-8034-1002ce6bf244"). InnerVolumeSpecName "kube-api-access-vs2xx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.622645 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.627503 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.633207 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn" Dec 05 05:42:54 crc kubenswrapper[4652]: E1205 05:42:54.646128 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="79ec8106-75f2-456c-91ae-bf0f71304e9b" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.701507 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvxjf\" (UniqueName: \"kubernetes.io/projected/c13255f8-7735-451b-90d4-27cc4d0844b2-kube-api-access-tvxjf\") pod \"c13255f8-7735-451b-90d4-27cc4d0844b2\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.701821 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpfzj\" (UniqueName: \"kubernetes.io/projected/66a52801-4112-449d-a366-cdd147e56ca5-kube-api-access-fpfzj\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.701837 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs2xx\" (UniqueName: \"kubernetes.io/projected/d89b380c-d2c9-431f-8034-1002ce6bf244-kube-api-access-vs2xx\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.705363 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c13255f8-7735-451b-90d4-27cc4d0844b2-kube-api-access-tvxjf" (OuterVolumeSpecName: "kube-api-access-tvxjf") pod "c13255f8-7735-451b-90d4-27cc4d0844b2" (UID: "c13255f8-7735-451b-90d4-27cc4d0844b2"). InnerVolumeSpecName "kube-api-access-tvxjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.802569 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhmt9\" (UniqueName: \"kubernetes.io/projected/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-kube-api-access-mhmt9\") pod \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.802800 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-config\") pod \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.802868 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc\") pod \"c13255f8-7735-451b-90d4-27cc4d0844b2\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.802889 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-dns-svc\") pod \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.802904 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-config\") pod \"c13255f8-7735-451b-90d4-27cc4d0844b2\" (UID: \"c13255f8-7735-451b-90d4-27cc4d0844b2\") " Dec 05 05:42:54 crc 
kubenswrapper[4652]: I1205 05:42:54.802926 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-dns-svc\") pod \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.802950 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb9lx\" (UniqueName: \"kubernetes.io/projected/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-kube-api-access-qb9lx\") pod \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\" (UID: \"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.802977 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-config\") pod \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\" (UID: \"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0\") " Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803120 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-config" (OuterVolumeSpecName: "config") pod "bc2335c5-9634-47bf-ae9d-b0cd9454d3b1" (UID: "bc2335c5-9634-47bf-ae9d-b0cd9454d3b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803301 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1f3358cf-54b3-4daa-a48c-6c3dd961ceb0" (UID: "1f3358cf-54b3-4daa-a48c-6c3dd961ceb0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803351 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc2335c5-9634-47bf-ae9d-b0cd9454d3b1" (UID: "bc2335c5-9634-47bf-ae9d-b0cd9454d3b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803367 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-config" (OuterVolumeSpecName: "config") pod "c13255f8-7735-451b-90d4-27cc4d0844b2" (UID: "c13255f8-7735-451b-90d4-27cc4d0844b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803390 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c13255f8-7735-451b-90d4-27cc4d0844b2" (UID: "c13255f8-7735-451b-90d4-27cc4d0844b2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803546 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803540 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-config" (OuterVolumeSpecName: "config") pod "1f3358cf-54b3-4daa-a48c-6c3dd961ceb0" (UID: "1f3358cf-54b3-4daa-a48c-6c3dd961ceb0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803572 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803582 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c13255f8-7735-451b-90d4-27cc4d0844b2-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803590 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803597 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvxjf\" (UniqueName: \"kubernetes.io/projected/c13255f8-7735-451b-90d4-27cc4d0844b2-kube-api-access-tvxjf\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.803605 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.806031 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-kube-api-access-qb9lx" (OuterVolumeSpecName: "kube-api-access-qb9lx") pod "bc2335c5-9634-47bf-ae9d-b0cd9454d3b1" (UID: "bc2335c5-9634-47bf-ae9d-b0cd9454d3b1"). InnerVolumeSpecName "kube-api-access-qb9lx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.807957 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-kube-api-access-mhmt9" (OuterVolumeSpecName: "kube-api-access-mhmt9") pod "1f3358cf-54b3-4daa-a48c-6c3dd961ceb0" (UID: "1f3358cf-54b3-4daa-a48c-6c3dd961ceb0"). InnerVolumeSpecName "kube-api-access-mhmt9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.904454 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb9lx\" (UniqueName: \"kubernetes.io/projected/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1-kube-api-access-qb9lx\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.904480 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:54 crc kubenswrapper[4652]: I1205 05:42:54.904491 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhmt9\" (UniqueName: \"kubernetes.io/projected/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0-kube-api-access-mhmt9\") on node \"crc\" DevicePath \"\"" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.020125 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerStarted","Data":"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.021709 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"79ec8106-75f2-456c-91ae-bf0f71304e9b","Type":"ContainerStarted","Data":"15483c201454123643438cdfcad3aa01677f20c52603ef69970db2d3adaacbaa"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.022944 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn" Dec 05 05:42:55 crc kubenswrapper[4652]: E1205 05:42:55.022946 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ovn-sb-db-server:current\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="79ec8106-75f2-456c-91ae-bf0f71304e9b" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.022947 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b9b4959cc-6sfnn" event={"ID":"bc2335c5-9634-47bf-ae9d-b0cd9454d3b1","Type":"ContainerDied","Data":"3da92ca10c3898eeab69f240afa8ebf58c7ec85ac641d96bc223aebcd092dcc2"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.024262 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.024263 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8468885bfc-9wk2l" event={"ID":"66a52801-4112-449d-a366-cdd147e56ca5","Type":"ContainerDied","Data":"9782d931eb8f2be94bca8216cae668a20d1e4bd965014c029b3a776b5c0b742a"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.025240 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5449989c59-vnpx6" event={"ID":"d89b380c-d2c9-431f-8034-1002ce6bf244","Type":"ContainerDied","Data":"5ae1061e8e65959194fd8c1792ca842cd6c02b1b4c85157e567a87bb4f217eb1"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.025256 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5449989c59-vnpx6" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.026267 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d" event={"ID":"1f3358cf-54b3-4daa-a48c-6c3dd961ceb0","Type":"ContainerDied","Data":"5ec942a02118d87548f33137e353b5ca0e98c6cd07cf373b219026f456b15851"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.026343 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86b8f4ff9-lvk6d" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.030652 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" event={"ID":"c13255f8-7735-451b-90d4-27cc4d0844b2","Type":"ContainerDied","Data":"365093c14fc2c737feb3e2541cca605e2c50da57d13b4dfeb12920268e63aa38"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.030718 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-545d49fd5c-t79sk" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.041442 4652 generic.go:334] "Generic (PLEG): container finished" podID="af44672e-c103-43c5-a9ed-2d9812053191" containerID="9d26c57462ea1792db92ae46f475220381ae83258407e2b0f7100782cda0314c" exitCode=0 Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.041489 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rrkl" event={"ID":"af44672e-c103-43c5-a9ed-2d9812053191","Type":"ContainerDied","Data":"9d26c57462ea1792db92ae46f475220381ae83258407e2b0f7100782cda0314c"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.070266 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1","Type":"ContainerStarted","Data":"cdd1176f8015c5c2c84154a16e1d7b8b3263b5cd2f9472378d85754f1f8c87d6"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.085548 4652 generic.go:334] "Generic (PLEG): container finished" podID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerID="3730242885a05feb9bd60b1f3f216fa0804e102df5d2a7db7dfef02ed3c905fd" exitCode=0 Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.085819 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" event={"ID":"1221691a-22b3-4c0b-8b69-61f5103e837a","Type":"ContainerDied","Data":"3730242885a05feb9bd60b1f3f216fa0804e102df5d2a7db7dfef02ed3c905fd"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.089924 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5m4" event={"ID":"c07a13fb-04c0-41fb-b8f5-376ac6355f61","Type":"ContainerStarted","Data":"05d42db172bdb10305ac0c6f7accfaa1712de1b2705164b11bc292996c07c2c0"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.093465 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"38d81e02-71a7-4093-b84c-135254187f85","Type":"ContainerStarted","Data":"415e29d377e54967d7f618e37cc8b38a0130f99e57b4e6625ea1589dbc45b59c"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.093999 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.095997 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6","Type":"ContainerStarted","Data":"0796fc6c8bfb0a449349ca6120f364e54c93b50dfd1f91c5c3d77351f17b9eb9"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.096024 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6","Type":"ContainerStarted","Data":"d699376c7d85e955bf35f048f1f489cbb934ba18a4ee2f0026d76c960fb46498"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.097913 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc","Type":"ContainerStarted","Data":"4c31f23a48eff9fad5f88fd5903c39743ba23531dff4e6848c57e56740e3ba1c"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.099276 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95s6" event={"ID":"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd","Type":"ContainerStarted","Data":"675aecf7516a513131cac7bcce0d2247bded0550f9f65a675569ed9ced9bd0d3"} Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.161930 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.139723852 podStartE2EDuration="40.161913627s" podCreationTimestamp="2025-12-05 05:42:15 +0000 UTC" firstStartedPulling="2025-12-05 05:42:17.921251986 +0000 UTC m=+940.157982254" lastFinishedPulling="2025-12-05 05:42:53.943441762 +0000 UTC m=+976.180172029" observedRunningTime="2025-12-05 05:42:55.142214152 +0000 UTC m=+977.378944419" watchObservedRunningTime="2025-12-05 05:42:55.161913627 +0000 UTC m=+977.398643894" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.188152 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.809075849 podStartE2EDuration="45.188137959s" podCreationTimestamp="2025-12-05 05:42:10 +0000 UTC" firstStartedPulling="2025-12-05 05:42:11.368863031 +0000 UTC m=+933.605593298" lastFinishedPulling="2025-12-05 05:42:53.747925141 +0000 UTC m=+975.984655408" observedRunningTime="2025-12-05 05:42:55.18739396 +0000 UTC m=+977.424124227" watchObservedRunningTime="2025-12-05 05:42:55.188137959 +0000 UTC m=+977.424868225" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.193188 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pl5m4" podStartSLOduration=3.182886097 podStartE2EDuration="31.193176794s" podCreationTimestamp="2025-12-05 05:42:24 +0000 UTC" firstStartedPulling="2025-12-05 05:42:25.770342535 +0000 UTC m=+948.007072802" lastFinishedPulling="2025-12-05 05:42:53.780633232 +0000 UTC m=+976.017363499" observedRunningTime="2025-12-05 05:42:55.17640074 +0000 UTC m=+977.413131007" watchObservedRunningTime="2025-12-05 05:42:55.193176794 +0000 UTC m=+977.429907061" Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.272207 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b9b4959cc-6sfnn"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.276262 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b9b4959cc-6sfnn"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.309369 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86b8f4ff9-lvk6d"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.315672 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/dnsmasq-dns-86b8f4ff9-lvk6d"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.356471 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-vnpx6"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.383286 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5449989c59-vnpx6"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.404986 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-9wk2l"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.411332 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8468885bfc-9wk2l"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.420765 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-t79sk"] Dec 05 05:42:55 crc kubenswrapper[4652]: I1205 05:42:55.424945 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-545d49fd5c-t79sk"] Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.106380 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" event={"ID":"1221691a-22b3-4c0b-8b69-61f5103e837a","Type":"ContainerStarted","Data":"190da5818b2617756d87143abdfb25c377d0bf3255392b3f3ea1791353679ff5"} Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.106619 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.109467 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rrkl" event={"ID":"af44672e-c103-43c5-a9ed-2d9812053191","Type":"ContainerStarted","Data":"bd16f4321ffdbb2dd3f7c41ec74117c6111b39c38f30ecc72800782709b5d77d"} Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.110987 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f","Type":"ContainerStarted","Data":"8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594"} Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.116070 4652 generic.go:334] "Generic (PLEG): container finished" podID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" containerID="3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624" exitCode=0 Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.116151 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" event={"ID":"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6","Type":"ContainerDied","Data":"3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624"} Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.119729 4652 generic.go:334] "Generic (PLEG): container finished" podID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerID="0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034" exitCode=0 Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.119856 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95s6" event={"ID":"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd","Type":"ContainerStarted","Data":"14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0"} Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.119897 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95s6" 
event={"ID":"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd","Type":"ContainerDied","Data":"0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034"} Dec 05 05:42:56 crc kubenswrapper[4652]: E1205 05:42:56.122756 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ovn-sb-db-server:current\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="79ec8106-75f2-456c-91ae-bf0f71304e9b" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.124602 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" podStartSLOduration=3.766143336 podStartE2EDuration="38.124589048s" podCreationTimestamp="2025-12-05 05:42:18 +0000 UTC" firstStartedPulling="2025-12-05 05:42:19.584844655 +0000 UTC m=+941.821574923" lastFinishedPulling="2025-12-05 05:42:53.943290367 +0000 UTC m=+976.180020635" observedRunningTime="2025-12-05 05:42:56.119786306 +0000 UTC m=+978.356516572" watchObservedRunningTime="2025-12-05 05:42:56.124589048 +0000 UTC m=+978.361319315" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.133538 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f3358cf-54b3-4daa-a48c-6c3dd961ceb0" path="/var/lib/kubelet/pods/1f3358cf-54b3-4daa-a48c-6c3dd961ceb0/volumes" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.133896 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66a52801-4112-449d-a366-cdd147e56ca5" path="/var/lib/kubelet/pods/66a52801-4112-449d-a366-cdd147e56ca5/volumes" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.134214 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc2335c5-9634-47bf-ae9d-b0cd9454d3b1" path="/var/lib/kubelet/pods/bc2335c5-9634-47bf-ae9d-b0cd9454d3b1/volumes" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.134524 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c13255f8-7735-451b-90d4-27cc4d0844b2" path="/var/lib/kubelet/pods/c13255f8-7735-451b-90d4-27cc4d0844b2/volumes" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.134868 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d89b380c-d2c9-431f-8034-1002ce6bf244" path="/var/lib/kubelet/pods/d89b380c-d2c9-431f-8034-1002ce6bf244/volumes" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.180381 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9rrkl" podStartSLOduration=14.31117455 podStartE2EDuration="29.180365552s" podCreationTimestamp="2025-12-05 05:42:27 +0000 UTC" firstStartedPulling="2025-12-05 05:42:40.707899961 +0000 UTC m=+962.944630228" lastFinishedPulling="2025-12-05 05:42:55.577090963 +0000 UTC m=+977.813821230" observedRunningTime="2025-12-05 05:42:56.166963136 +0000 UTC m=+978.403693403" watchObservedRunningTime="2025-12-05 05:42:56.180365552 +0000 UTC m=+978.417095819" Dec 05 05:42:56 crc kubenswrapper[4652]: I1205 05:42:56.353858 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 05:42:57 crc kubenswrapper[4652]: I1205 05:42:57.127184 4652 generic.go:334] "Generic (PLEG): container finished" podID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerID="14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0" exitCode=0 Dec 05 05:42:57 crc kubenswrapper[4652]: I1205 05:42:57.127224 4652 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95s6" event={"ID":"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd","Type":"ContainerDied","Data":"14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0"} Dec 05 05:42:57 crc kubenswrapper[4652]: I1205 05:42:57.129204 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerStarted","Data":"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47"} Dec 05 05:42:57 crc kubenswrapper[4652]: I1205 05:42:57.131604 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" event={"ID":"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6","Type":"ContainerStarted","Data":"930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb"} Dec 05 05:42:57 crc kubenswrapper[4652]: I1205 05:42:57.131672 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:42:57 crc kubenswrapper[4652]: I1205 05:42:57.353637 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 05:42:57 crc kubenswrapper[4652]: I1205 05:42:57.970705 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:57 crc kubenswrapper[4652]: I1205 05:42:57.970770 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:58 crc kubenswrapper[4652]: I1205 05:42:58.018995 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:42:58 crc kubenswrapper[4652]: I1205 05:42:58.034049 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" podStartSLOduration=4.845013204 podStartE2EDuration="33.034035554s" podCreationTimestamp="2025-12-05 05:42:25 +0000 UTC" firstStartedPulling="2025-12-05 05:42:25.880042039 +0000 UTC m=+948.116772306" lastFinishedPulling="2025-12-05 05:42:54.069064389 +0000 UTC m=+976.305794656" observedRunningTime="2025-12-05 05:42:57.165636237 +0000 UTC m=+979.402366504" watchObservedRunningTime="2025-12-05 05:42:58.034035554 +0000 UTC m=+980.270765820" Dec 05 05:42:58 crc kubenswrapper[4652]: I1205 05:42:58.138456 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95s6" event={"ID":"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd","Type":"ContainerStarted","Data":"7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df"} Dec 05 05:42:58 crc kubenswrapper[4652]: I1205 05:42:58.159657 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t95s6" podStartSLOduration=17.687854247 podStartE2EDuration="20.159642181s" podCreationTimestamp="2025-12-05 05:42:38 +0000 UTC" firstStartedPulling="2025-12-05 05:42:55.100483923 +0000 UTC m=+977.337214190" lastFinishedPulling="2025-12-05 05:42:57.572271856 +0000 UTC m=+979.809002124" observedRunningTime="2025-12-05 05:42:58.156455756 +0000 UTC m=+980.393186022" watchObservedRunningTime="2025-12-05 05:42:58.159642181 +0000 UTC m=+980.396372447" Dec 05 05:42:58 crc kubenswrapper[4652]: E1205 05:42:58.406714 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb12802a1_fea7_4427_9a10_7c6b2ac6c5bc.slice/crio-conmon-4c31f23a48eff9fad5f88fd5903c39743ba23531dff4e6848c57e56740e3ba1c.scope\": RecentStats: unable to find data in memory cache]" Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.071303 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t95s6" Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.071340 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t95s6" Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.105450 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t95s6" Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.146259 4652 generic.go:334] "Generic (PLEG): container finished" podID="d16da2b5-fe11-4ded-9722-94f4ddb2c8e1" containerID="cdd1176f8015c5c2c84154a16e1d7b8b3263b5cd2f9472378d85754f1f8c87d6" exitCode=0 Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.146340 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1","Type":"ContainerDied","Data":"cdd1176f8015c5c2c84154a16e1d7b8b3263b5cd2f9472378d85754f1f8c87d6"} Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.149794 4652 generic.go:334] "Generic (PLEG): container finished" podID="b12802a1-fea7-4427-9a10-7c6b2ac6c5bc" containerID="4c31f23a48eff9fad5f88fd5903c39743ba23531dff4e6848c57e56740e3ba1c" exitCode=0 Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.149875 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc","Type":"ContainerDied","Data":"4c31f23a48eff9fad5f88fd5903c39743ba23531dff4e6848c57e56740e3ba1c"} Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.384010 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 05:42:59 crc kubenswrapper[4652]: I1205 05:42:59.433393 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.161790 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerStarted","Data":"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672"} Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.165983 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"d16da2b5-fe11-4ded-9722-94f4ddb2c8e1","Type":"ContainerStarted","Data":"4507450962b8304ab764d688dbf2628e058ff2ea1403767624a34cfd967ad776"} Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.168611 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b12802a1-fea7-4427-9a10-7c6b2ac6c5bc","Type":"ContainerStarted","Data":"b515dcff06d5bc02925ee4cc606865f137552ef14767d91a15c05a2373739ade"} Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.225332 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.3509719 podStartE2EDuration="51.225318428s" podCreationTimestamp="2025-12-05 05:42:09 +0000 UTC" firstStartedPulling="2025-12-05 
05:42:11.070586955 +0000 UTC m=+933.307317223" lastFinishedPulling="2025-12-05 05:42:53.944933484 +0000 UTC m=+976.181663751" observedRunningTime="2025-12-05 05:43:00.224680841 +0000 UTC m=+982.461411108" watchObservedRunningTime="2025-12-05 05:43:00.225318428 +0000 UTC m=+982.462048696" Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.226118 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=4.69483598 podStartE2EDuration="48.226114103s" podCreationTimestamp="2025-12-05 05:42:12 +0000 UTC" firstStartedPulling="2025-12-05 05:42:15.805431686 +0000 UTC m=+938.042161953" lastFinishedPulling="2025-12-05 05:42:59.336709819 +0000 UTC m=+981.573440076" observedRunningTime="2025-12-05 05:43:00.205177284 +0000 UTC m=+982.441907551" watchObservedRunningTime="2025-12-05 05:43:00.226114103 +0000 UTC m=+982.462844370" Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.243806 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=9.230738603 podStartE2EDuration="53.243791881s" podCreationTimestamp="2025-12-05 05:42:07 +0000 UTC" firstStartedPulling="2025-12-05 05:42:09.734979625 +0000 UTC m=+931.971709892" lastFinishedPulling="2025-12-05 05:42:53.748032903 +0000 UTC m=+975.984763170" observedRunningTime="2025-12-05 05:43:00.243002046 +0000 UTC m=+982.479732313" watchObservedRunningTime="2025-12-05 05:43:00.243791881 +0000 UTC m=+982.480522148" Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.514842 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.514885 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 05:43:00 crc kubenswrapper[4652]: I1205 05:43:00.889659 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.924287 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fb75c485f-vkkdf"] Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.924835 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" podUID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerName="dnsmasq-dns" containerID="cri-o://190da5818b2617756d87143abdfb25c377d0bf3255392b3f3ea1791353679ff5" gracePeriod=10 Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.925758 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.949991 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-kmsw2"] Dec 05 05:43:02 crc kubenswrapper[4652]: E1205 05:43:02.950321 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="extract-content" Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.950332 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="extract-content" Dec 05 05:43:02 crc kubenswrapper[4652]: E1205 05:43:02.950350 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="registry-server" Dec 05 05:43:02 crc kubenswrapper[4652]: 
I1205 05:43:02.950356 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="registry-server" Dec 05 05:43:02 crc kubenswrapper[4652]: E1205 05:43:02.950373 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="extract-utilities" Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.950379 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="extract-utilities" Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.950531 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d5bcbec-a16f-4d8a-88ba-fad5ca841bc3" containerName="registry-server" Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.951351 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:02 crc kubenswrapper[4652]: I1205 05:43:02.964853 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-kmsw2"] Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.033020 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-sb\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.033087 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-config\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.033112 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-dns-svc\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.033151 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-nb\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.033237 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85vzn\" (UniqueName: \"kubernetes.io/projected/330ff405-1db6-4136-b17b-679168d3125b-kube-api-access-85vzn\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.134973 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-nb\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc 
kubenswrapper[4652]: I1205 05:43:03.135052 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85vzn\" (UniqueName: \"kubernetes.io/projected/330ff405-1db6-4136-b17b-679168d3125b-kube-api-access-85vzn\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.135120 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-sb\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.135156 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-config\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.135177 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-dns-svc\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.136030 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-nb\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.136252 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-dns-svc\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.144485 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-config\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.144737 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-sb\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.157163 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85vzn\" (UniqueName: \"kubernetes.io/projected/330ff405-1db6-4136-b17b-679168d3125b-kube-api-access-85vzn\") pod \"dnsmasq-dns-76f9c4c8bc-kmsw2\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.249819 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-server-0" event={"ID":"4aa077b9-9612-44cf-b163-d0c1f9468787","Type":"ContainerStarted","Data":"10385fadc1c0181fdfc6ad208b74d8a4b1a93afe7b38437feb6e08a3cf4c076e"} Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.252537 4652 generic.go:334] "Generic (PLEG): container finished" podID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerID="190da5818b2617756d87143abdfb25c377d0bf3255392b3f3ea1791353679ff5" exitCode=0 Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.252604 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" event={"ID":"1221691a-22b3-4c0b-8b69-61f5103e837a","Type":"ContainerDied","Data":"190da5818b2617756d87143abdfb25c377d0bf3255392b3f3ea1791353679ff5"} Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.294813 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.471831 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.644137 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-config\") pod \"1221691a-22b3-4c0b-8b69-61f5103e837a\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.644431 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-ovsdbserver-nb\") pod \"1221691a-22b3-4c0b-8b69-61f5103e837a\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.644467 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-dns-svc\") pod \"1221691a-22b3-4c0b-8b69-61f5103e837a\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.644490 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tcxm\" (UniqueName: \"kubernetes.io/projected/1221691a-22b3-4c0b-8b69-61f5103e837a-kube-api-access-6tcxm\") pod \"1221691a-22b3-4c0b-8b69-61f5103e837a\" (UID: \"1221691a-22b3-4c0b-8b69-61f5103e837a\") " Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.656212 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1221691a-22b3-4c0b-8b69-61f5103e837a-kube-api-access-6tcxm" (OuterVolumeSpecName: "kube-api-access-6tcxm") pod "1221691a-22b3-4c0b-8b69-61f5103e837a" (UID: "1221691a-22b3-4c0b-8b69-61f5103e837a"). InnerVolumeSpecName "kube-api-access-6tcxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.675066 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-config" (OuterVolumeSpecName: "config") pod "1221691a-22b3-4c0b-8b69-61f5103e837a" (UID: "1221691a-22b3-4c0b-8b69-61f5103e837a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.676387 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1221691a-22b3-4c0b-8b69-61f5103e837a" (UID: "1221691a-22b3-4c0b-8b69-61f5103e837a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.679649 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1221691a-22b3-4c0b-8b69-61f5103e837a" (UID: "1221691a-22b3-4c0b-8b69-61f5103e837a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.743259 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-kmsw2"] Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.746639 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.746665 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.746675 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tcxm\" (UniqueName: \"kubernetes.io/projected/1221691a-22b3-4c0b-8b69-61f5103e837a-kube-api-access-6tcxm\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:03 crc kubenswrapper[4652]: I1205 05:43:03.746683 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1221691a-22b3-4c0b-8b69-61f5103e837a-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:03 crc kubenswrapper[4652]: W1205 05:43:03.746971 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330ff405_1db6_4136_b17b_679168d3125b.slice/crio-ea2e8f8e68b877ad182f02e2113c5e5e852fe0ba6c88c3029cba5648e329c9ad WatchSource:0}: Error finding container ea2e8f8e68b877ad182f02e2113c5e5e852fe0ba6c88c3029cba5648e329c9ad: Status 404 returned error can't find the container with id ea2e8f8e68b877ad182f02e2113c5e5e852fe0ba6c88c3029cba5648e329c9ad Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.068714 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 05:43:04 crc kubenswrapper[4652]: E1205 05:43:04.068984 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerName="init" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.068996 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerName="init" Dec 05 05:43:04 crc kubenswrapper[4652]: E1205 05:43:04.069021 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerName="dnsmasq-dns" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.069026 4652 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerName="dnsmasq-dns" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.069160 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerName="dnsmasq-dns" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.073178 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.074437 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-hfgvz" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.074658 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.074661 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.074667 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.083894 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.255246 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x87nl\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-kube-api-access-x87nl\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.255488 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f145cea0-a420-4b52-95bb-83042cd8d09b-cache\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.255543 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.255610 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f145cea0-a420-4b52-95bb-83042cd8d09b-lock\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.255684 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.260302 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" event={"ID":"330ff405-1db6-4136-b17b-679168d3125b","Type":"ContainerStarted","Data":"ea2e8f8e68b877ad182f02e2113c5e5e852fe0ba6c88c3029cba5648e329c9ad"} Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.262056 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" event={"ID":"1221691a-22b3-4c0b-8b69-61f5103e837a","Type":"ContainerDied","Data":"2cc002b8b4bf1736374b51051adbc547962385ae4cfc8c53ca9abe8c19f305e8"} Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.262096 4652 scope.go:117] "RemoveContainer" containerID="190da5818b2617756d87143abdfb25c377d0bf3255392b3f3ea1791353679ff5" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.262196 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.280224 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fb75c485f-vkkdf"] Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.283944 4652 scope.go:117] "RemoveContainer" containerID="3730242885a05feb9bd60b1f3f216fa0804e102df5d2a7db7dfef02ed3c905fd" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.285174 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6fb75c485f-vkkdf"] Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.356632 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f145cea0-a420-4b52-95bb-83042cd8d09b-lock\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.356742 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.356775 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x87nl\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-kube-api-access-x87nl\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.356798 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f145cea0-a420-4b52-95bb-83042cd8d09b-cache\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.356836 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: E1205 05:43:04.356940 4652 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 05:43:04 crc kubenswrapper[4652]: E1205 05:43:04.356961 4652 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 05:43:04 crc kubenswrapper[4652]: E1205 05:43:04.357005 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift podName:f145cea0-a420-4b52-95bb-83042cd8d09b nodeName:}" failed. 
No retries permitted until 2025-12-05 05:43:04.856989884 +0000 UTC m=+987.093720151 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift") pod "swift-storage-0" (UID: "f145cea0-a420-4b52-95bb-83042cd8d09b") : configmap "swift-ring-files" not found Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.357029 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.357294 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f145cea0-a420-4b52-95bb-83042cd8d09b-lock\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.357301 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f145cea0-a420-4b52-95bb-83042cd8d09b-cache\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.370838 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x87nl\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-kube-api-access-x87nl\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.373072 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.449317 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.581460 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.664151 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.714036 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-hg67q"] Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.714897 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.717392 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.720713 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.720957 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.726081 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hg67q"] Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.865181 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-swiftconf\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.865364 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-combined-ca-bundle\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.865391 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-scripts\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.865418 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-dispersionconf\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.865441 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtmv6\" (UniqueName: \"kubernetes.io/projected/d882dd0e-1ba2-4b38-a548-9d47833aa687-kube-api-access-xtmv6\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.865460 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-ring-data-devices\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.865485 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d882dd0e-1ba2-4b38-a548-9d47833aa687-etc-swift\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 
05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.865510 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:04 crc kubenswrapper[4652]: E1205 05:43:04.865658 4652 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 05:43:04 crc kubenswrapper[4652]: E1205 05:43:04.865677 4652 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 05:43:04 crc kubenswrapper[4652]: E1205 05:43:04.865716 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift podName:f145cea0-a420-4b52-95bb-83042cd8d09b nodeName:}" failed. No retries permitted until 2025-12-05 05:43:05.865704304 +0000 UTC m=+988.102434562 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift") pod "swift-storage-0" (UID: "f145cea0-a420-4b52-95bb-83042cd8d09b") : configmap "swift-ring-files" not found Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.966894 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-swiftconf\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.967043 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-combined-ca-bundle\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.967065 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-scripts\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.967086 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-dispersionconf\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.967110 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtmv6\" (UniqueName: \"kubernetes.io/projected/d882dd0e-1ba2-4b38-a548-9d47833aa687-kube-api-access-xtmv6\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.967130 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-ring-data-devices\") pod 
\"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.967146 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d882dd0e-1ba2-4b38-a548-9d47833aa687-etc-swift\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.967539 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d882dd0e-1ba2-4b38-a548-9d47833aa687-etc-swift\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.968269 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-ring-data-devices\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.968328 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-scripts\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.971181 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-dispersionconf\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.971231 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-swiftconf\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.971489 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-combined-ca-bundle\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.979176 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtmv6\" (UniqueName: \"kubernetes.io/projected/d882dd0e-1ba2-4b38-a548-9d47833aa687-kube-api-access-xtmv6\") pod \"swift-ring-rebalance-hg67q\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.981643 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:43:04 crc kubenswrapper[4652]: I1205 05:43:04.981690 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 
05:43:05.017879 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.029516 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.270776 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kpg54" event={"ID":"157a2e37-1f93-4c7b-817c-ac64edce5a2f","Type":"ContainerStarted","Data":"66485e375c9774b90da7820d1c8f0a59d127f088a9b9b6188c80a8b0a295b991"} Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.276702 4652 generic.go:334] "Generic (PLEG): container finished" podID="330ff405-1db6-4136-b17b-679168d3125b" containerID="8df2150e5095790438c0007af828c4530b07f0b9601d520c9a28c6297a267826" exitCode=0 Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.276891 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" event={"ID":"330ff405-1db6-4136-b17b-679168d3125b","Type":"ContainerDied","Data":"8df2150e5095790438c0007af828c4530b07f0b9601d520c9a28c6297a267826"} Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.286584 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"86f82531-5219-4cd8-9432-1e8dc2a73b08","Type":"ContainerStarted","Data":"8f6498aedbc6df7c3fbd7f3863fdb9ca6535da9aece40f8f649c2d061282a171"} Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.398773 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.429447 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hg67q"] Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.435653 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.473281 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pl5m4"] Dec 05 05:43:05 crc kubenswrapper[4652]: I1205 05:43:05.881382 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:05 crc kubenswrapper[4652]: E1205 05:43:05.881599 4652 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 05:43:05 crc kubenswrapper[4652]: E1205 05:43:05.881746 4652 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 05:43:05 crc kubenswrapper[4652]: E1205 05:43:05.881807 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift podName:f145cea0-a420-4b52-95bb-83042cd8d09b nodeName:}" failed. No retries permitted until 2025-12-05 05:43:07.881790656 +0000 UTC m=+990.118520924 (durationBeforeRetry 2s). 
Dec 05 05:43:06 crc kubenswrapper[4652]: I1205 05:43:06.133703 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1221691a-22b3-4c0b-8b69-61f5103e837a" path="/var/lib/kubelet/pods/1221691a-22b3-4c0b-8b69-61f5103e837a/volumes"
Dec 05 05:43:06 crc kubenswrapper[4652]: I1205 05:43:06.287766 4652 generic.go:334] "Generic (PLEG): container finished" podID="157a2e37-1f93-4c7b-817c-ac64edce5a2f" containerID="66485e375c9774b90da7820d1c8f0a59d127f088a9b9b6188c80a8b0a295b991" exitCode=0
Dec 05 05:43:06 crc kubenswrapper[4652]: I1205 05:43:06.287846 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kpg54" event={"ID":"157a2e37-1f93-4c7b-817c-ac64edce5a2f","Type":"ContainerDied","Data":"66485e375c9774b90da7820d1c8f0a59d127f088a9b9b6188c80a8b0a295b991"}
Dec 05 05:43:06 crc kubenswrapper[4652]: I1205 05:43:06.290572 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" event={"ID":"330ff405-1db6-4136-b17b-679168d3125b","Type":"ContainerStarted","Data":"9b7f311a8faac7a447c8090ed4c737822c82df8d6ceddf87d7f17b2b8eb892ed"}
Dec 05 05:43:06 crc kubenswrapper[4652]: I1205 05:43:06.290701 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2"
Dec 05 05:43:06 crc kubenswrapper[4652]: I1205 05:43:06.291860 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hg67q" event={"ID":"d882dd0e-1ba2-4b38-a548-9d47833aa687","Type":"ContainerStarted","Data":"febc9095d2b12a99eb06c50ceb4688b521147c21f8f88ae14a475a4515842c04"}
Dec 05 05:43:06 crc kubenswrapper[4652]: I1205 05:43:06.314341 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" podStartSLOduration=4.314329894 podStartE2EDuration="4.314329894s" podCreationTimestamp="2025-12-05 05:43:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:43:06.310925339 +0000 UTC m=+988.547655606" watchObservedRunningTime="2025-12-05 05:43:06.314329894 +0000 UTC m=+988.551060162"
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.301340 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9ddrq" event={"ID":"bc55c237-0d7b-419f-9b97-966d6b918bda","Type":"ContainerStarted","Data":"18eaee8ccb0df6703c2b38b0831046c024d45eb14435f6dc03c19adf3732f4e5"}
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.302047 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-9ddrq"
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.305287 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pl5m4" podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerName="registry-server" containerID="cri-o://05d42db172bdb10305ac0c6f7accfaa1712de1b2705164b11bc292996c07c2c0" gracePeriod=2
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.305399 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kpg54" event={"ID":"157a2e37-1f93-4c7b-817c-ac64edce5a2f","Type":"ContainerStarted","Data":"8e6ba6d2e73ab465767fd9b61724fd63e6527abfdc2b066614c31dbbbadc5ac4"}
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.305424 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-kpg54" event={"ID":"157a2e37-1f93-4c7b-817c-ac64edce5a2f","Type":"ContainerStarted","Data":"dc027a10270f0dfd5153d58514d5957f1d0aa38d4753ea7739c96198edb58bc1"}
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.305535 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-kpg54"
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.305747 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-kpg54"
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.325448 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-9ddrq" podStartSLOduration=1.9504279979999999 podStartE2EDuration="51.325430862s" podCreationTimestamp="2025-12-05 05:42:16 +0000 UTC" firstStartedPulling="2025-12-05 05:42:17.447441155 +0000 UTC m=+939.684171422" lastFinishedPulling="2025-12-05 05:43:06.822444019 +0000 UTC m=+989.059174286" observedRunningTime="2025-12-05 05:43:07.321754496 +0000 UTC m=+989.558484763" watchObservedRunningTime="2025-12-05 05:43:07.325430862 +0000 UTC m=+989.562161118"
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.346770 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-kpg54" podStartSLOduration=4.236793483 podStartE2EDuration="51.346754658s" podCreationTimestamp="2025-12-05 05:42:16 +0000 UTC" firstStartedPulling="2025-12-05 05:42:17.674381375 +0000 UTC m=+939.911111641" lastFinishedPulling="2025-12-05 05:43:04.784342549 +0000 UTC m=+987.021072816" observedRunningTime="2025-12-05 05:43:07.341111186 +0000 UTC m=+989.577841454" watchObservedRunningTime="2025-12-05 05:43:07.346754658 +0000 UTC m=+989.583484925"
Dec 05 05:43:07 crc kubenswrapper[4652]: I1205 05:43:07.922432 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0"
Dec 05 05:43:07 crc kubenswrapper[4652]: E1205 05:43:07.922680 4652 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 05 05:43:07 crc kubenswrapper[4652]: E1205 05:43:07.923041 4652 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 05 05:43:07 crc kubenswrapper[4652]: E1205 05:43:07.923101 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift podName:f145cea0-a420-4b52-95bb-83042cd8d09b nodeName:}" failed. No retries permitted until 2025-12-05 05:43:11.923082178 +0000 UTC m=+994.159812445 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift") pod "swift-storage-0" (UID: "f145cea0-a420-4b52-95bb-83042cd8d09b") : configmap "swift-ring-files" not found
Dec 05 05:43:08 crc kubenswrapper[4652]: I1205 05:43:08.003903 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9rrkl"
Dec 05 05:43:08 crc kubenswrapper[4652]: I1205 05:43:08.038638 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9rrkl"]
Dec 05 05:43:08 crc kubenswrapper[4652]: I1205 05:43:08.319987 4652 generic.go:334] "Generic (PLEG): container finished" podID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerID="05d42db172bdb10305ac0c6f7accfaa1712de1b2705164b11bc292996c07c2c0" exitCode=0
Dec 05 05:43:08 crc kubenswrapper[4652]: I1205 05:43:08.320113 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5m4" event={"ID":"c07a13fb-04c0-41fb-b8f5-376ac6355f61","Type":"ContainerDied","Data":"05d42db172bdb10305ac0c6f7accfaa1712de1b2705164b11bc292996c07c2c0"}
Dec 05 05:43:08 crc kubenswrapper[4652]: I1205 05:43:08.320337 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9rrkl" podUID="af44672e-c103-43c5-a9ed-2d9812053191" containerName="registry-server" containerID="cri-o://bd16f4321ffdbb2dd3f7c41ec74117c6111b39c38f30ecc72800782709b5d77d" gracePeriod=2
Dec 05 05:43:08 crc kubenswrapper[4652]: I1205 05:43:08.353806 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6fb75c485f-vkkdf" podUID="1221691a-22b3-4c0b-8b69-61f5103e837a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: i/o timeout"
Dec 05 05:43:09 crc kubenswrapper[4652]: I1205 05:43:09.104271 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t95s6"
Dec 05 05:43:09 crc kubenswrapper[4652]: I1205 05:43:09.207248 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Dec 05 05:43:09 crc kubenswrapper[4652]: I1205 05:43:09.207303 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Dec 05 05:43:09 crc kubenswrapper[4652]: I1205 05:43:09.305517 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 05 05:43:09 crc kubenswrapper[4652]: I1205 05:43:09.330664 4652 generic.go:334] "Generic (PLEG): container finished" podID="af44672e-c103-43c5-a9ed-2d9812053191" containerID="bd16f4321ffdbb2dd3f7c41ec74117c6111b39c38f30ecc72800782709b5d77d" exitCode=0
Dec 05 05:43:09 crc kubenswrapper[4652]: I1205 05:43:09.330731 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rrkl" event={"ID":"af44672e-c103-43c5-a9ed-2d9812053191","Type":"ContainerDied","Data":"bd16f4321ffdbb2dd3f7c41ec74117c6111b39c38f30ecc72800782709b5d77d"}
Dec 05 05:43:09 crc kubenswrapper[4652]: I1205 05:43:09.417920 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.044135 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t95s6"]
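The etc-swift mount for swift-storage-0 has now failed twice, with durationBeforeRetry doubling from 2s to 4s (an 8s retry follows further below). A Go sketch of that doubling policy, assuming a simple cap for illustration (not the kubelet's actual nestedpendingoperations code):

package main

import (
	"fmt"
	"time"
)

// retryBackoff mimics the doubling visible in the log:
// durationBeforeRetry 2s -> 4s -> 8s. The cap is an assumed
// value for illustration, not a kubelet constant.
func retryBackoff(failures int) time.Duration {
	const (
		initial    = 2 * time.Second
		maxBackoff = 2 * time.Minute // assumed cap
	)
	if failures < 1 {
		return initial
	}
	d := initial << uint(failures-1) // 2s, 4s, 8s, ...
	if d > maxBackoff || d < 0 {
		return maxBackoff
	}
	return d
}

func main() {
	for attempt := 1; attempt <= 3; attempt++ {
		fmt.Printf("failure %d: retry in %s\n", attempt, retryBackoff(attempt))
	}
}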
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.044610 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t95s6" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerName="registry-server" containerID="cri-o://7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df" gracePeriod=2
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.527581 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-b27e-account-create-update-tntgc"]
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.529436 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b27e-account-create-update-tntgc"
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.530968 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.547195 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b27e-account-create-update-tntgc"]
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.567002 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-operator-scripts\") pod \"keystone-b27e-account-create-update-tntgc\" (UID: \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\") " pod="openstack/keystone-b27e-account-create-update-tntgc"
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.567406 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mrh4\" (UniqueName: \"kubernetes.io/projected/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-kube-api-access-9mrh4\") pod \"keystone-b27e-account-create-update-tntgc\" (UID: \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\") " pod="openstack/keystone-b27e-account-create-update-tntgc"
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.571587 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-t9r7h"]
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.572784 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-t9r7h"
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.589585 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-t9r7h"]
Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.627325 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5m4"
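Each "Killing container with a grace period" entry gives the container gracePeriod seconds between the polite stop and the forced kill: 2 for the marketplace registry-server containers here, 10 for dnsmasq-dns further below. A toy Go two-phase stop illustrating the timing (process signalling elided; the channel is a stand-in for the container's exit notification, not a real CRI call):

package main

import (
	"fmt"
	"time"
)

// killWithGrace sends a polite stop, waits up to grace for exit,
// then forces the kill, like the gracePeriod handling in the log.
func killWithGrace(name string, grace time.Duration, exited <-chan struct{}) {
	fmt.Printf("SIGTERM %s (grace %s)\n", name, grace)
	select {
	case <-exited:
		fmt.Printf("%s exited within grace period\n", name)
	case <-time.After(grace):
		fmt.Printf("SIGKILL %s after %s\n", name, grace)
	}
}

func main() {
	done := make(chan struct{})
	go func() { time.Sleep(50 * time.Millisecond); close(done) }()
	killWithGrace("registry-server", 2*time.Second, done)
}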
Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.669008 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-utilities\") pod \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.669725 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjbpx\" (UniqueName: \"kubernetes.io/projected/c07a13fb-04c0-41fb-b8f5-376ac6355f61-kube-api-access-gjbpx\") pod \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.669812 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-catalog-content\") pod \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\" (UID: \"c07a13fb-04c0-41fb-b8f5-376ac6355f61\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.669824 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-utilities" (OuterVolumeSpecName: "utilities") pod "c07a13fb-04c0-41fb-b8f5-376ac6355f61" (UID: "c07a13fb-04c0-41fb-b8f5-376ac6355f61"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.674044 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdxvd\" (UniqueName: \"kubernetes.io/projected/380194ba-35a8-4f22-ae98-fd2745e61bff-kube-api-access-zdxvd\") pod \"keystone-db-create-t9r7h\" (UID: \"380194ba-35a8-4f22-ae98-fd2745e61bff\") " pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.674143 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mrh4\" (UniqueName: \"kubernetes.io/projected/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-kube-api-access-9mrh4\") pod \"keystone-b27e-account-create-update-tntgc\" (UID: \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\") " pod="openstack/keystone-b27e-account-create-update-tntgc" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.674330 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-operator-scripts\") pod \"keystone-b27e-account-create-update-tntgc\" (UID: \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\") " pod="openstack/keystone-b27e-account-create-update-tntgc" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.674443 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/380194ba-35a8-4f22-ae98-fd2745e61bff-operator-scripts\") pod \"keystone-db-create-t9r7h\" (UID: \"380194ba-35a8-4f22-ae98-fd2745e61bff\") " pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.674579 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c07a13fb-04c0-41fb-b8f5-376ac6355f61-kube-api-access-gjbpx" (OuterVolumeSpecName: "kube-api-access-gjbpx") pod "c07a13fb-04c0-41fb-b8f5-376ac6355f61" 
(UID: "c07a13fb-04c0-41fb-b8f5-376ac6355f61"). InnerVolumeSpecName "kube-api-access-gjbpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.674672 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.675241 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-operator-scripts\") pod \"keystone-b27e-account-create-update-tntgc\" (UID: \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\") " pod="openstack/keystone-b27e-account-create-update-tntgc" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.691042 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mrh4\" (UniqueName: \"kubernetes.io/projected/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-kube-api-access-9mrh4\") pod \"keystone-b27e-account-create-update-tntgc\" (UID: \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\") " pod="openstack/keystone-b27e-account-create-update-tntgc" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.764565 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c07a13fb-04c0-41fb-b8f5-376ac6355f61" (UID: "c07a13fb-04c0-41fb-b8f5-376ac6355f61"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.777032 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdxvd\" (UniqueName: \"kubernetes.io/projected/380194ba-35a8-4f22-ae98-fd2745e61bff-kube-api-access-zdxvd\") pod \"keystone-db-create-t9r7h\" (UID: \"380194ba-35a8-4f22-ae98-fd2745e61bff\") " pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.777124 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/380194ba-35a8-4f22-ae98-fd2745e61bff-operator-scripts\") pod \"keystone-db-create-t9r7h\" (UID: \"380194ba-35a8-4f22-ae98-fd2745e61bff\") " pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.777242 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjbpx\" (UniqueName: \"kubernetes.io/projected/c07a13fb-04c0-41fb-b8f5-376ac6355f61-kube-api-access-gjbpx\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.777259 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c07a13fb-04c0-41fb-b8f5-376ac6355f61-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.778144 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/380194ba-35a8-4f22-ae98-fd2745e61bff-operator-scripts\") pod \"keystone-db-create-t9r7h\" (UID: \"380194ba-35a8-4f22-ae98-fd2745e61bff\") " pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.795405 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdxvd\" 
(UniqueName: \"kubernetes.io/projected/380194ba-35a8-4f22-ae98-fd2745e61bff-kube-api-access-zdxvd\") pod \"keystone-db-create-t9r7h\" (UID: \"380194ba-35a8-4f22-ae98-fd2745e61bff\") " pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.840260 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t95s6" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.871928 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.880227 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-utilities\") pod \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.880359 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dntt8\" (UniqueName: \"kubernetes.io/projected/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-kube-api-access-dntt8\") pod \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.880459 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-catalog-content\") pod \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\" (UID: \"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.883240 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-utilities" (OuterVolumeSpecName: "utilities") pod "cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" (UID: "cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.885926 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-kube-api-access-dntt8" (OuterVolumeSpecName: "kube-api-access-dntt8") pod "cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" (UID: "cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd"). InnerVolumeSpecName "kube-api-access-dntt8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.888637 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-pssmm"] Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889041 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af44672e-c103-43c5-a9ed-2d9812053191" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889059 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="af44672e-c103-43c5-a9ed-2d9812053191" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889072 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerName="extract-utilities" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889079 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerName="extract-utilities" Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889092 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af44672e-c103-43c5-a9ed-2d9812053191" containerName="extract-content" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889098 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="af44672e-c103-43c5-a9ed-2d9812053191" containerName="extract-content" Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889111 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerName="extract-content" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889116 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerName="extract-content" Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889125 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af44672e-c103-43c5-a9ed-2d9812053191" containerName="extract-utilities" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889130 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="af44672e-c103-43c5-a9ed-2d9812053191" containerName="extract-utilities" Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889141 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889147 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889155 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerName="extract-content" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889160 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerName="extract-content" Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889174 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889181 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: E1205 05:43:10.889191 4652 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerName="extract-utilities" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889198 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerName="extract-utilities" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889359 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889377 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="af44672e-c103-43c5-a9ed-2d9812053191" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.889389 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" containerName="registry-server" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.890013 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pssmm" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.897400 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-pssmm"] Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.921512 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b27e-account-create-update-tntgc" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.939219 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" (UID: "cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.982212 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-utilities\") pod \"af44672e-c103-43c5-a9ed-2d9812053191\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.982446 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-catalog-content\") pod \"af44672e-c103-43c5-a9ed-2d9812053191\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.982610 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmw7h\" (UniqueName: \"kubernetes.io/projected/af44672e-c103-43c5-a9ed-2d9812053191-kube-api-access-qmw7h\") pod \"af44672e-c103-43c5-a9ed-2d9812053191\" (UID: \"af44672e-c103-43c5-a9ed-2d9812053191\") " Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.982944 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abdd17b-b27d-4404-b8e4-845c4c04152a-operator-scripts\") pod \"placement-db-create-pssmm\" (UID: \"3abdd17b-b27d-4404-b8e4-845c4c04152a\") " pod="openstack/placement-db-create-pssmm" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.983008 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqcjc\" (UniqueName: \"kubernetes.io/projected/3abdd17b-b27d-4404-b8e4-845c4c04152a-kube-api-access-bqcjc\") pod \"placement-db-create-pssmm\" (UID: \"3abdd17b-b27d-4404-b8e4-845c4c04152a\") " pod="openstack/placement-db-create-pssmm" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.983098 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.983113 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.983122 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dntt8\" (UniqueName: \"kubernetes.io/projected/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd-kube-api-access-dntt8\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.983119 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-utilities" (OuterVolumeSpecName: "utilities") pod "af44672e-c103-43c5-a9ed-2d9812053191" (UID: "af44672e-c103-43c5-a9ed-2d9812053191"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.986767 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-d422-account-create-update-kkjwm"] Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.987333 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af44672e-c103-43c5-a9ed-2d9812053191-kube-api-access-qmw7h" (OuterVolumeSpecName: "kube-api-access-qmw7h") pod "af44672e-c103-43c5-a9ed-2d9812053191" (UID: "af44672e-c103-43c5-a9ed-2d9812053191"). InnerVolumeSpecName "kube-api-access-qmw7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.987916 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:10 crc kubenswrapper[4652]: I1205 05:43:10.990313 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.011881 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-d422-account-create-update-kkjwm"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.037654 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.056270 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af44672e-c103-43c5-a9ed-2d9812053191" (UID: "af44672e-c103-43c5-a9ed-2d9812053191"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.084314 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7721fbf3-a28b-4c0c-b581-54946dda1b02-operator-scripts\") pod \"placement-d422-account-create-update-kkjwm\" (UID: \"7721fbf3-a28b-4c0c-b581-54946dda1b02\") " pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.084428 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abdd17b-b27d-4404-b8e4-845c4c04152a-operator-scripts\") pod \"placement-db-create-pssmm\" (UID: \"3abdd17b-b27d-4404-b8e4-845c4c04152a\") " pod="openstack/placement-db-create-pssmm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.084470 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzppr\" (UniqueName: \"kubernetes.io/projected/7721fbf3-a28b-4c0c-b581-54946dda1b02-kube-api-access-kzppr\") pod \"placement-d422-account-create-update-kkjwm\" (UID: \"7721fbf3-a28b-4c0c-b581-54946dda1b02\") " pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.084534 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqcjc\" (UniqueName: \"kubernetes.io/projected/3abdd17b-b27d-4404-b8e4-845c4c04152a-kube-api-access-bqcjc\") pod \"placement-db-create-pssmm\" (UID: \"3abdd17b-b27d-4404-b8e4-845c4c04152a\") " pod="openstack/placement-db-create-pssmm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.084673 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.084690 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmw7h\" (UniqueName: \"kubernetes.io/projected/af44672e-c103-43c5-a9ed-2d9812053191-kube-api-access-qmw7h\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.084700 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af44672e-c103-43c5-a9ed-2d9812053191-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.085203 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abdd17b-b27d-4404-b8e4-845c4c04152a-operator-scripts\") pod \"placement-db-create-pssmm\" (UID: \"3abdd17b-b27d-4404-b8e4-845c4c04152a\") " pod="openstack/placement-db-create-pssmm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.103588 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqcjc\" (UniqueName: \"kubernetes.io/projected/3abdd17b-b27d-4404-b8e4-845c4c04152a-kube-api-access-bqcjc\") pod \"placement-db-create-pssmm\" (UID: \"3abdd17b-b27d-4404-b8e4-845c4c04152a\") " pod="openstack/placement-db-create-pssmm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.185481 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7721fbf3-a28b-4c0c-b581-54946dda1b02-operator-scripts\") pod \"placement-d422-account-create-update-kkjwm\" (UID: \"7721fbf3-a28b-4c0c-b581-54946dda1b02\") " pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.185591 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzppr\" (UniqueName: \"kubernetes.io/projected/7721fbf3-a28b-4c0c-b581-54946dda1b02-kube-api-access-kzppr\") pod \"placement-d422-account-create-update-kkjwm\" (UID: \"7721fbf3-a28b-4c0c-b581-54946dda1b02\") " pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.187378 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7721fbf3-a28b-4c0c-b581-54946dda1b02-operator-scripts\") pod \"placement-d422-account-create-update-kkjwm\" (UID: \"7721fbf3-a28b-4c0c-b581-54946dda1b02\") " pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.201640 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pssmm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.206976 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzppr\" (UniqueName: \"kubernetes.io/projected/7721fbf3-a28b-4c0c-b581-54946dda1b02-kube-api-access-kzppr\") pod \"placement-d422-account-create-update-kkjwm\" (UID: \"7721fbf3-a28b-4c0c-b581-54946dda1b02\") " pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.315476 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.341236 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-t9r7h"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.349913 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b27e-account-create-update-tntgc"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.377945 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pl5m4" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.377941 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pl5m4" event={"ID":"c07a13fb-04c0-41fb-b8f5-376ac6355f61","Type":"ContainerDied","Data":"c8ad7666d3e2fc961335f69e9226b00b716ee8985aec9343c3d4c108d584c535"} Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.378075 4652 scope.go:117] "RemoveContainer" containerID="05d42db172bdb10305ac0c6f7accfaa1712de1b2705164b11bc292996c07c2c0" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.404584 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hg67q" event={"ID":"d882dd0e-1ba2-4b38-a548-9d47833aa687","Type":"ContainerStarted","Data":"25d4fbeaecf92b81f6be9d3267f7c477ee76c28acb464d780195a0a37c9dc0f1"} Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.413954 4652 generic.go:334] "Generic (PLEG): container finished" podID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" containerID="7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df" exitCode=0 Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.414013 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95s6" event={"ID":"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd","Type":"ContainerDied","Data":"7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df"} Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.414037 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95s6" event={"ID":"cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd","Type":"ContainerDied","Data":"675aecf7516a513131cac7bcce0d2247bded0550f9f65a675569ed9ced9bd0d3"} Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.414089 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t95s6" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.418503 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pl5m4"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.419396 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"79ec8106-75f2-456c-91ae-bf0f71304e9b","Type":"ContainerStarted","Data":"99ff27c6630cff334267e4cc7ac84c7a814393f40df5a3a10581b699523fe008"} Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.425538 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pl5m4"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.428299 4652 scope.go:117] "RemoveContainer" containerID="b9b397efca3da981d1e4e293a4c28e4f7e77f5c02f42ddf054d726ac4c129cc5" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.431079 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9rrkl" event={"ID":"af44672e-c103-43c5-a9ed-2d9812053191","Type":"ContainerDied","Data":"306accec47e57c7d24e9c38a58ae8cbdc207f33b629081934d4fa9782be7899a"} Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.431113 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9rrkl" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.440602 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-hg67q" podStartSLOduration=2.244901927 podStartE2EDuration="7.44058022s" podCreationTimestamp="2025-12-05 05:43:04 +0000 UTC" firstStartedPulling="2025-12-05 05:43:05.433329278 +0000 UTC m=+987.670059545" lastFinishedPulling="2025-12-05 05:43:10.62900757 +0000 UTC m=+992.865737838" observedRunningTime="2025-12-05 05:43:11.433998347 +0000 UTC m=+993.670728614" watchObservedRunningTime="2025-12-05 05:43:11.44058022 +0000 UTC m=+993.677310486" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.480744 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t95s6"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.494686 4652 scope.go:117] "RemoveContainer" containerID="29b06fc044130ae30cfab04fe66f4cfb679ee1a7df75fb4f78e7664dd7bf427c" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.514035 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t95s6"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.518102 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.39727525 podStartE2EDuration="53.518089809s" podCreationTimestamp="2025-12-05 05:42:18 +0000 UTC" firstStartedPulling="2025-12-05 05:42:20.5047155 +0000 UTC m=+942.741445766" lastFinishedPulling="2025-12-05 05:43:10.625530059 +0000 UTC m=+992.862260325" observedRunningTime="2025-12-05 05:43:11.466390654 +0000 UTC m=+993.703120942" watchObservedRunningTime="2025-12-05 05:43:11.518089809 +0000 UTC m=+993.754820076" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.523295 4652 scope.go:117] "RemoveContainer" containerID="7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.526118 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9rrkl"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.530898 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9rrkl"] Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.547811 4652 scope.go:117] "RemoveContainer" containerID="14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.576265 4652 scope.go:117] "RemoveContainer" containerID="0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.655754 4652 scope.go:117] "RemoveContainer" containerID="7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df" Dec 05 05:43:11 crc kubenswrapper[4652]: E1205 05:43:11.656987 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df\": container with ID starting with 7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df not found: ID does not exist" containerID="7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.657030 4652 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df"} err="failed to get container status \"7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df\": rpc error: code = NotFound desc = could not find container \"7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df\": container with ID starting with 7b8a7074688c896492d07c5c8e09724f546708335f95fbf3e9ad6c6c8b7541df not found: ID does not exist" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.657055 4652 scope.go:117] "RemoveContainer" containerID="14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0" Dec 05 05:43:11 crc kubenswrapper[4652]: E1205 05:43:11.662675 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0\": container with ID starting with 14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0 not found: ID does not exist" containerID="14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.662712 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0"} err="failed to get container status \"14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0\": rpc error: code = NotFound desc = could not find container \"14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0\": container with ID starting with 14323b3624e2c23d486099b816e794621390124b133595a5e9f982b19b187df0 not found: ID does not exist" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.662743 4652 scope.go:117] "RemoveContainer" containerID="0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034" Dec 05 05:43:11 crc kubenswrapper[4652]: E1205 05:43:11.663044 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034\": container with ID starting with 0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034 not found: ID does not exist" containerID="0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.663072 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034"} err="failed to get container status \"0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034\": rpc error: code = NotFound desc = could not find container \"0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034\": container with ID starting with 0322efd28a4736ec39dc25b148d6b3797305dedf82154139eddf12a40ebd7034 not found: ID does not exist" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.663093 4652 scope.go:117] "RemoveContainer" containerID="bd16f4321ffdbb2dd3f7c41ec74117c6111b39c38f30ecc72800782709b5d77d" Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.666384 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-pssmm"] Dec 05 05:43:11 crc kubenswrapper[4652]: W1205 05:43:11.669670 4652 manager.go:1169] Failed to process watch event {EventType:0 
Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.705240 4652 scope.go:117] "RemoveContainer" containerID="9d26c57462ea1792db92ae46f475220381ae83258407e2b0f7100782cda0314c"
Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.730002 4652 scope.go:117] "RemoveContainer" containerID="8f5b4178eea34e47adc3fc5f62f380e56bc3a11ce9178b01f74ab2a242d22b8c"
Dec 05 05:43:11 crc kubenswrapper[4652]: I1205 05:43:11.805044 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-d422-account-create-update-kkjwm"]
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.008450 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0"
Dec 05 05:43:12 crc kubenswrapper[4652]: E1205 05:43:12.008651 4652 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 05 05:43:12 crc kubenswrapper[4652]: E1205 05:43:12.008670 4652 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 05 05:43:12 crc kubenswrapper[4652]: E1205 05:43:12.008706 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift podName:f145cea0-a420-4b52-95bb-83042cd8d09b nodeName:}" failed. No retries permitted until 2025-12-05 05:43:20.008694011 +0000 UTC m=+1002.245424278 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift") pod "swift-storage-0" (UID: "f145cea0-a420-4b52-95bb-83042cd8d09b") : configmap "swift-ring-files" not found
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.133038 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af44672e-c103-43c5-a9ed-2d9812053191" path="/var/lib/kubelet/pods/af44672e-c103-43c5-a9ed-2d9812053191/volumes"
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.133958 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c07a13fb-04c0-41fb-b8f5-376ac6355f61" path="/var/lib/kubelet/pods/c07a13fb-04c0-41fb-b8f5-376ac6355f61/volumes"
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.134618 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd" path="/var/lib/kubelet/pods/cdc32328-6d21-4fb9-bce4-ef8ffb26e8cd/volumes"
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.438907 4652 generic.go:334] "Generic (PLEG): container finished" podID="3abdd17b-b27d-4404-b8e4-845c4c04152a" containerID="4c16140ac10c08488733b1e2c7cd4077a2b87972e291e28e41a347b35106d26b" exitCode=0
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.440043 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pssmm" event={"ID":"3abdd17b-b27d-4404-b8e4-845c4c04152a","Type":"ContainerDied","Data":"4c16140ac10c08488733b1e2c7cd4077a2b87972e291e28e41a347b35106d26b"}
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.440093 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pssmm" event={"ID":"3abdd17b-b27d-4404-b8e4-845c4c04152a","Type":"ContainerStarted","Data":"425cd23df9cf5b634f6dea1b3f26cd8352cfd8e290da97d2d0b01f7feed761df"}
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.443770 4652 generic.go:334] "Generic (PLEG): container finished" podID="7f6ae44c-0c8e-43db-a1a0-77ed8ae83520" containerID="d4b2f7541bff3469e82bc2a38a7c194eba0aa8f0c5c38077e38245a48951957e" exitCode=0
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.443807 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b27e-account-create-update-tntgc" event={"ID":"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520","Type":"ContainerDied","Data":"d4b2f7541bff3469e82bc2a38a7c194eba0aa8f0c5c38077e38245a48951957e"}
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.443821 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b27e-account-create-update-tntgc" event={"ID":"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520","Type":"ContainerStarted","Data":"0f320034cd38a106e7dd3327202b156a713001f574dce83b3d66efb65b579e59"}
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.445854 4652 generic.go:334] "Generic (PLEG): container finished" podID="7721fbf3-a28b-4c0c-b581-54946dda1b02" containerID="83a16c7851d170a2f679ecbd5b35876ef7f68d20f0b69cea038b41eadf2908b4" exitCode=0
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.445891 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d422-account-create-update-kkjwm" event={"ID":"7721fbf3-a28b-4c0c-b581-54946dda1b02","Type":"ContainerDied","Data":"83a16c7851d170a2f679ecbd5b35876ef7f68d20f0b69cea038b41eadf2908b4"}
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.445905 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d422-account-create-update-kkjwm" event={"ID":"7721fbf3-a28b-4c0c-b581-54946dda1b02","Type":"ContainerStarted","Data":"e884a069c532e50ca29e7b3a4ef1a76560181941f42d1cb03a5015d76d2e1adb"}
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.446937 4652 generic.go:334] "Generic (PLEG): container finished" podID="380194ba-35a8-4f22-ae98-fd2745e61bff" containerID="b540e2589aa35a038ade87aaa4320725fa48cb7b7e97029a22128dff624b1594" exitCode=0
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.447437 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t9r7h" event={"ID":"380194ba-35a8-4f22-ae98-fd2745e61bff","Type":"ContainerDied","Data":"b540e2589aa35a038ade87aaa4320725fa48cb7b7e97029a22128dff624b1594"}
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.447466 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t9r7h" event={"ID":"380194ba-35a8-4f22-ae98-fd2745e61bff","Type":"ContainerStarted","Data":"1a36d6f897eba4292e0c603b75f60fe3b18a9f2cc86d498fd479e0c01984a6fe"}
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.893107 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-rjlwl"]
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.894069 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-rjlwl"
Dec 05 05:43:12 crc kubenswrapper[4652]: I1205 05:43:12.900696 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-rjlwl"]
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.000723 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-9607-account-create-update-p8nxg"]
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.001727 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.003226 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.007124 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-9607-account-create-update-p8nxg"]
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.024073 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwk8x\" (UniqueName: \"kubernetes.io/projected/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-kube-api-access-pwk8x\") pod \"watcher-db-create-rjlwl\" (UID: \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\") " pod="openstack/watcher-db-create-rjlwl"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.024545 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-operator-scripts\") pod \"watcher-db-create-rjlwl\" (UID: \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\") " pod="openstack/watcher-db-create-rjlwl"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.127253 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-operator-scripts\") pod \"watcher-9607-account-create-update-p8nxg\" (UID: \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\") " pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.127329 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwsjj\" (UniqueName: \"kubernetes.io/projected/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-kube-api-access-gwsjj\") pod \"watcher-9607-account-create-update-p8nxg\" (UID: \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\") " pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.127374 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwk8x\" (UniqueName: \"kubernetes.io/projected/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-kube-api-access-pwk8x\") pod \"watcher-db-create-rjlwl\" (UID: \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\") " pod="openstack/watcher-db-create-rjlwl"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.127407 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-operator-scripts\") pod \"watcher-db-create-rjlwl\" (UID: \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\") " pod="openstack/watcher-db-create-rjlwl"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.128628 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-operator-scripts\") pod \"watcher-db-create-rjlwl\" (UID: \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\") " pod="openstack/watcher-db-create-rjlwl"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.145600 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwk8x\" (UniqueName: \"kubernetes.io/projected/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-kube-api-access-pwk8x\") pod \"watcher-db-create-rjlwl\" (UID: \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\") " pod="openstack/watcher-db-create-rjlwl"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.209672 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-rjlwl"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.230163 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-operator-scripts\") pod \"watcher-9607-account-create-update-p8nxg\" (UID: \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\") " pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.230226 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwsjj\" (UniqueName: \"kubernetes.io/projected/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-kube-api-access-gwsjj\") pod \"watcher-9607-account-create-update-p8nxg\" (UID: \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\") " pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.230919 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-operator-scripts\") pod \"watcher-9607-account-create-update-p8nxg\" (UID: \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\") " pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.245030 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwsjj\" (UniqueName: \"kubernetes.io/projected/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-kube-api-access-gwsjj\") pod \"watcher-9607-account-create-update-p8nxg\" (UID: \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\") " pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.297919 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.317353 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.349239 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dbf544cc9-zdrbc"]
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.349978 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" podUID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" containerName="dnsmasq-dns" containerID="cri-o://930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb" gracePeriod=10
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.625565 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-rjlwl"]
Dec 05 05:43:13 crc kubenswrapper[4652]: W1205 05:43:13.629841 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf95d2d0b_364e_4bd4_88bd_ffe18c9f8102.slice/crio-446b2d64a2e4fd7e3a2581a6914aba0b48893a7e761e8a8c02c277bc7b51a7c4 WatchSource:0}: Error finding container 446b2d64a2e4fd7e3a2581a6914aba0b48893a7e761e8a8c02c277bc7b51a7c4: Status 404 returned error can't find the container with id 446b2d64a2e4fd7e3a2581a6914aba0b48893a7e761e8a8c02c277bc7b51a7c4
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.811991 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b27e-account-create-update-tntgc"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.863388 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mrh4\" (UniqueName: \"kubernetes.io/projected/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-kube-api-access-9mrh4\") pod \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\" (UID: \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\") "
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.863627 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-operator-scripts\") pod \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\" (UID: \"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520\") "
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.864463 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7f6ae44c-0c8e-43db-a1a0-77ed8ae83520" (UID: "7f6ae44c-0c8e-43db-a1a0-77ed8ae83520"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.868666 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-kube-api-access-9mrh4" (OuterVolumeSpecName: "kube-api-access-9mrh4") pod "7f6ae44c-0c8e-43db-a1a0-77ed8ae83520" (UID: "7f6ae44c-0c8e-43db-a1a0-77ed8ae83520"). InnerVolumeSpecName "kube-api-access-9mrh4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.883906 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pssmm"
Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.893096 4652 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.917956 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.944493 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.969670 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/380194ba-35a8-4f22-ae98-fd2745e61bff-operator-scripts\") pod \"380194ba-35a8-4f22-ae98-fd2745e61bff\" (UID: \"380194ba-35a8-4f22-ae98-fd2745e61bff\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.969821 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-sb\") pod \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.969923 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prhp7\" (UniqueName: \"kubernetes.io/projected/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-kube-api-access-prhp7\") pod \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.970082 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/380194ba-35a8-4f22-ae98-fd2745e61bff-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "380194ba-35a8-4f22-ae98-fd2745e61bff" (UID: "380194ba-35a8-4f22-ae98-fd2745e61bff"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.970232 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abdd17b-b27d-4404-b8e4-845c4c04152a-operator-scripts\") pod \"3abdd17b-b27d-4404-b8e4-845c4c04152a\" (UID: \"3abdd17b-b27d-4404-b8e4-845c4c04152a\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.970356 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-config\") pod \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.970442 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-nb\") pod \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.970535 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqcjc\" (UniqueName: \"kubernetes.io/projected/3abdd17b-b27d-4404-b8e4-845c4c04152a-kube-api-access-bqcjc\") pod \"3abdd17b-b27d-4404-b8e4-845c4c04152a\" (UID: \"3abdd17b-b27d-4404-b8e4-845c4c04152a\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.970641 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdxvd\" (UniqueName: \"kubernetes.io/projected/380194ba-35a8-4f22-ae98-fd2745e61bff-kube-api-access-zdxvd\") pod \"380194ba-35a8-4f22-ae98-fd2745e61bff\" (UID: \"380194ba-35a8-4f22-ae98-fd2745e61bff\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.970727 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-dns-svc\") pod \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\" (UID: \"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6\") " Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.971176 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/380194ba-35a8-4f22-ae98-fd2745e61bff-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.971249 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mrh4\" (UniqueName: \"kubernetes.io/projected/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-kube-api-access-9mrh4\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.971311 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.970749 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3abdd17b-b27d-4404-b8e4-845c4c04152a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3abdd17b-b27d-4404-b8e4-845c4c04152a" (UID: "3abdd17b-b27d-4404-b8e4-845c4c04152a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.974636 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3abdd17b-b27d-4404-b8e4-845c4c04152a-kube-api-access-bqcjc" (OuterVolumeSpecName: "kube-api-access-bqcjc") pod "3abdd17b-b27d-4404-b8e4-845c4c04152a" (UID: "3abdd17b-b27d-4404-b8e4-845c4c04152a"). InnerVolumeSpecName "kube-api-access-bqcjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.975130 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-kube-api-access-prhp7" (OuterVolumeSpecName: "kube-api-access-prhp7") pod "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" (UID: "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6"). InnerVolumeSpecName "kube-api-access-prhp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.977667 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/380194ba-35a8-4f22-ae98-fd2745e61bff-kube-api-access-zdxvd" (OuterVolumeSpecName: "kube-api-access-zdxvd") pod "380194ba-35a8-4f22-ae98-fd2745e61bff" (UID: "380194ba-35a8-4f22-ae98-fd2745e61bff"). InnerVolumeSpecName "kube-api-access-zdxvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:13 crc kubenswrapper[4652]: I1205 05:43:13.987072 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.006904 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" (UID: "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.012874 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-config" (OuterVolumeSpecName: "config") pod "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" (UID: "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.013688 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" (UID: "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.019685 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.033278 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" (UID: "74b1c3bc-23da-43ea-8b20-4b6d2447f4a6"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.073220 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzppr\" (UniqueName: \"kubernetes.io/projected/7721fbf3-a28b-4c0c-b581-54946dda1b02-kube-api-access-kzppr\") pod \"7721fbf3-a28b-4c0c-b581-54946dda1b02\" (UID: \"7721fbf3-a28b-4c0c-b581-54946dda1b02\") " Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.073451 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7721fbf3-a28b-4c0c-b581-54946dda1b02-operator-scripts\") pod \"7721fbf3-a28b-4c0c-b581-54946dda1b02\" (UID: \"7721fbf3-a28b-4c0c-b581-54946dda1b02\") " Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.073889 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7721fbf3-a28b-4c0c-b581-54946dda1b02-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7721fbf3-a28b-4c0c-b581-54946dda1b02" (UID: "7721fbf3-a28b-4c0c-b581-54946dda1b02"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074193 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abdd17b-b27d-4404-b8e4-845c4c04152a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074213 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074222 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074231 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqcjc\" (UniqueName: \"kubernetes.io/projected/3abdd17b-b27d-4404-b8e4-845c4c04152a-kube-api-access-bqcjc\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074242 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdxvd\" (UniqueName: \"kubernetes.io/projected/380194ba-35a8-4f22-ae98-fd2745e61bff-kube-api-access-zdxvd\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074249 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074258 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074266 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prhp7\" (UniqueName: \"kubernetes.io/projected/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6-kube-api-access-prhp7\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.074273 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7721fbf3-a28b-4c0c-b581-54946dda1b02-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.075904 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7721fbf3-a28b-4c0c-b581-54946dda1b02-kube-api-access-kzppr" (OuterVolumeSpecName: "kube-api-access-kzppr") pod "7721fbf3-a28b-4c0c-b581-54946dda1b02" (UID: "7721fbf3-a28b-4c0c-b581-54946dda1b02"). InnerVolumeSpecName "kube-api-access-kzppr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.103645 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-9607-account-create-update-p8nxg"] Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.175540 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzppr\" (UniqueName: \"kubernetes.io/projected/7721fbf3-a28b-4c0c-b581-54946dda1b02-kube-api-access-kzppr\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.449232 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.451643 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.460962 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pssmm" event={"ID":"3abdd17b-b27d-4404-b8e4-845c4c04152a","Type":"ContainerDied","Data":"425cd23df9cf5b634f6dea1b3f26cd8352cfd8e290da97d2d0b01f7feed761df"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.460986 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="425cd23df9cf5b634f6dea1b3f26cd8352cfd8e290da97d2d0b01f7feed761df" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.461017 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pssmm" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.463655 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.463670 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" event={"ID":"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6","Type":"ContainerDied","Data":"930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.463730 4652 scope.go:117] "RemoveContainer" containerID="930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.463629 4652 generic.go:334] "Generic (PLEG): container finished" podID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" containerID="930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb" exitCode=0 Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.463817 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dbf544cc9-zdrbc" event={"ID":"74b1c3bc-23da-43ea-8b20-4b6d2447f4a6","Type":"ContainerDied","Data":"4c5e03058c62f4e12029ed22dab415ad8aa753e712ef7dc92a3838839b269d5d"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.465679 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-9607-account-create-update-p8nxg" event={"ID":"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2","Type":"ContainerStarted","Data":"b1c16519846df87cee6af3758d3a3b6b917bdb58f45ab7202013b40ac0e2176e"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.465704 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-9607-account-create-update-p8nxg" event={"ID":"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2","Type":"ContainerStarted","Data":"f0f9b4f283d83fba0aaf3efa913ad4e2befea3901cad6c97a080f28222a0794f"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.476435 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b27e-account-create-update-tntgc" event={"ID":"7f6ae44c-0c8e-43db-a1a0-77ed8ae83520","Type":"ContainerDied","Data":"0f320034cd38a106e7dd3327202b156a713001f574dce83b3d66efb65b579e59"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.476464 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f320034cd38a106e7dd3327202b156a713001f574dce83b3d66efb65b579e59" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.476513 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-b27e-account-create-update-tntgc" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.494817 4652 scope.go:117] "RemoveContainer" containerID="3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.507611 4652 generic.go:334] "Generic (PLEG): container finished" podID="f95d2d0b-364e-4bd4-88bd-ffe18c9f8102" containerID="2a1bb7548b46d41253554853e5b5b68e1a82f21524a281e033123cd218d00e2c" exitCode=0 Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.507750 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-rjlwl" event={"ID":"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102","Type":"ContainerDied","Data":"2a1bb7548b46d41253554853e5b5b68e1a82f21524a281e033123cd218d00e2c"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.507849 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-rjlwl" event={"ID":"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102","Type":"ContainerStarted","Data":"446b2d64a2e4fd7e3a2581a6914aba0b48893a7e761e8a8c02c277bc7b51a7c4"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.513471 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dbf544cc9-zdrbc"] Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.526531 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6dbf544cc9-zdrbc"] Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.527324 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d422-account-create-update-kkjwm" event={"ID":"7721fbf3-a28b-4c0c-b581-54946dda1b02","Type":"ContainerDied","Data":"e884a069c532e50ca29e7b3a4ef1a76560181941f42d1cb03a5015d76d2e1adb"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.527352 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e884a069c532e50ca29e7b3a4ef1a76560181941f42d1cb03a5015d76d2e1adb" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.527401 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d422-account-create-update-kkjwm" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.529809 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-t9r7h" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.530129 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-t9r7h" event={"ID":"380194ba-35a8-4f22-ae98-fd2745e61bff","Type":"ContainerDied","Data":"1a36d6f897eba4292e0c603b75f60fe3b18a9f2cc86d498fd479e0c01984a6fe"} Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.530147 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a36d6f897eba4292e0c603b75f60fe3b18a9f2cc86d498fd479e0c01984a6fe" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.530276 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.535794 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.551925 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-9607-account-create-update-p8nxg" podStartSLOduration=2.5519141579999998 podStartE2EDuration="2.551914158s" podCreationTimestamp="2025-12-05 05:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:43:14.536112885 +0000 UTC m=+996.772843153" watchObservedRunningTime="2025-12-05 05:43:14.551914158 +0000 UTC m=+996.788644425" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.562674 4652 scope.go:117] "RemoveContainer" containerID="930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb" Dec 05 05:43:14 crc kubenswrapper[4652]: E1205 05:43:14.573692 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb\": container with ID starting with 930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb not found: ID does not exist" containerID="930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.573735 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb"} err="failed to get container status \"930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb\": rpc error: code = NotFound desc = could not find container \"930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb\": container with ID starting with 930d5e74daa115bd17fb5b6694c3e5c749c9a5877b35393797b63c317b4685bb not found: ID does not exist" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.573758 4652 scope.go:117] "RemoveContainer" containerID="3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624" Dec 05 05:43:14 crc kubenswrapper[4652]: E1205 05:43:14.577766 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624\": container with ID starting with 3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624 not found: ID does not exist" containerID="3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624" Dec 05 05:43:14 crc kubenswrapper[4652]: I1205 05:43:14.577859 4652 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624"} err="failed to get container status \"3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624\": rpc error: code = NotFound desc = could not find container \"3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624\": container with ID starting with 3e74552f90118bc550106cec89c893802cb3293ff9ed33c5a4b202bc859ec624 not found: ID does not exist" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.537273 4652 generic.go:334] "Generic (PLEG): container finished" podID="9c3f37ef-831b-4a8c-a453-7fc85aaa37e2" containerID="b1c16519846df87cee6af3758d3a3b6b917bdb58f45ab7202013b40ac0e2176e" exitCode=0 Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.537367 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-9607-account-create-update-p8nxg" event={"ID":"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2","Type":"ContainerDied","Data":"b1c16519846df87cee6af3758d3a3b6b917bdb58f45ab7202013b40ac0e2176e"} Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.571839 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.832089 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-rjlwl" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.856070 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 05:43:15 crc kubenswrapper[4652]: E1205 05:43:15.856488 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3abdd17b-b27d-4404-b8e4-845c4c04152a" containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.856565 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3abdd17b-b27d-4404-b8e4-845c4c04152a" containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: E1205 05:43:15.856660 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f6ae44c-0c8e-43db-a1a0-77ed8ae83520" containerName="mariadb-account-create-update" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.856728 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f6ae44c-0c8e-43db-a1a0-77ed8ae83520" containerName="mariadb-account-create-update" Dec 05 05:43:15 crc kubenswrapper[4652]: E1205 05:43:15.856789 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" containerName="dnsmasq-dns" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.856835 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" containerName="dnsmasq-dns" Dec 05 05:43:15 crc kubenswrapper[4652]: E1205 05:43:15.856886 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" containerName="init" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.856939 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" containerName="init" Dec 05 05:43:15 crc kubenswrapper[4652]: E1205 05:43:15.856983 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f95d2d0b-364e-4bd4-88bd-ffe18c9f8102" containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857045 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f95d2d0b-364e-4bd4-88bd-ffe18c9f8102" 
containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: E1205 05:43:15.857094 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="380194ba-35a8-4f22-ae98-fd2745e61bff" containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857137 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="380194ba-35a8-4f22-ae98-fd2745e61bff" containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: E1205 05:43:15.857191 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7721fbf3-a28b-4c0c-b581-54946dda1b02" containerName="mariadb-account-create-update" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857235 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="7721fbf3-a28b-4c0c-b581-54946dda1b02" containerName="mariadb-account-create-update" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857409 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f95d2d0b-364e-4bd4-88bd-ffe18c9f8102" containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857472 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="7721fbf3-a28b-4c0c-b581-54946dda1b02" containerName="mariadb-account-create-update" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857530 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="380194ba-35a8-4f22-ae98-fd2745e61bff" containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857630 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="3abdd17b-b27d-4404-b8e4-845c4c04152a" containerName="mariadb-database-create" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857694 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f6ae44c-0c8e-43db-a1a0-77ed8ae83520" containerName="mariadb-account-create-update" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.857765 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" containerName="dnsmasq-dns" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.858546 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.861402 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.862513 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.862749 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.862948 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-b6jcd" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.868773 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.925650 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-operator-scripts\") pod \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\" (UID: \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\") " Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.925784 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwk8x\" (UniqueName: \"kubernetes.io/projected/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-kube-api-access-pwk8x\") pod \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\" (UID: \"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102\") " Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.926124 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f95d2d0b-364e-4bd4-88bd-ffe18c9f8102" (UID: "f95d2d0b-364e-4bd4-88bd-ffe18c9f8102"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.926355 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:15 crc kubenswrapper[4652]: I1205 05:43:15.930529 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-kube-api-access-pwk8x" (OuterVolumeSpecName: "kube-api-access-pwk8x") pod "f95d2d0b-364e-4bd4-88bd-ffe18c9f8102" (UID: "f95d2d0b-364e-4bd4-88bd-ffe18c9f8102"). InnerVolumeSpecName "kube-api-access-pwk8x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.027935 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr7kl\" (UniqueName: \"kubernetes.io/projected/7425818b-e1fe-4aab-b8db-5c071afb7c9e-kube-api-access-xr7kl\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.027975 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7425818b-e1fe-4aab-b8db-5c071afb7c9e-config\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.028086 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.028101 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.028141 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7425818b-e1fe-4aab-b8db-5c071afb7c9e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.028154 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.028182 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7425818b-e1fe-4aab-b8db-5c071afb7c9e-scripts\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.028286 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwk8x\" (UniqueName: \"kubernetes.io/projected/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102-kube-api-access-pwk8x\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.129518 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.129569 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.129625 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7425818b-e1fe-4aab-b8db-5c071afb7c9e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.129643 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.129685 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7425818b-e1fe-4aab-b8db-5c071afb7c9e-scripts\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.129798 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr7kl\" (UniqueName: \"kubernetes.io/projected/7425818b-e1fe-4aab-b8db-5c071afb7c9e-kube-api-access-xr7kl\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.129822 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7425818b-e1fe-4aab-b8db-5c071afb7c9e-config\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.130688 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7425818b-e1fe-4aab-b8db-5c071afb7c9e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.130927 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7425818b-e1fe-4aab-b8db-5c071afb7c9e-scripts\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.132022 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7425818b-e1fe-4aab-b8db-5c071afb7c9e-config\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.132369 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.135245 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74b1c3bc-23da-43ea-8b20-4b6d2447f4a6" 
path="/var/lib/kubelet/pods/74b1c3bc-23da-43ea-8b20-4b6d2447f4a6/volumes" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.139032 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.140322 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7425818b-e1fe-4aab-b8db-5c071afb7c9e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.143248 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr7kl\" (UniqueName: \"kubernetes.io/projected/7425818b-e1fe-4aab-b8db-5c071afb7c9e-kube-api-access-xr7kl\") pod \"ovn-northd-0\" (UID: \"7425818b-e1fe-4aab-b8db-5c071afb7c9e\") " pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.170399 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.547369 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-rjlwl" event={"ID":"f95d2d0b-364e-4bd4-88bd-ffe18c9f8102","Type":"ContainerDied","Data":"446b2d64a2e4fd7e3a2581a6914aba0b48893a7e761e8a8c02c277bc7b51a7c4"} Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.547407 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="446b2d64a2e4fd7e3a2581a6914aba0b48893a7e761e8a8c02c277bc7b51a7c4" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.547463 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-rjlwl" Dec 05 05:43:16 crc kubenswrapper[4652]: I1205 05:43:16.564748 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:16.839725 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:16.839959 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="prometheus" containerID="cri-o://60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531" gracePeriod=600 Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:16.840307 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="thanos-sidecar" containerID="cri-o://dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672" gracePeriod=600 Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:16.840349 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="config-reloader" containerID="cri-o://5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47" gracePeriod=600 Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:16.880463 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-9607-account-create-update-p8nxg" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.048382 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwsjj\" (UniqueName: \"kubernetes.io/projected/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-kube-api-access-gwsjj\") pod \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\" (UID: \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\") " Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.048537 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-operator-scripts\") pod \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\" (UID: \"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2\") " Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.048912 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9c3f37ef-831b-4a8c-a453-7fc85aaa37e2" (UID: "9c3f37ef-831b-4a8c-a453-7fc85aaa37e2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.049824 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.053727 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-kube-api-access-gwsjj" (OuterVolumeSpecName: "kube-api-access-gwsjj") pod "9c3f37ef-831b-4a8c-a453-7fc85aaa37e2" (UID: "9c3f37ef-831b-4a8c-a453-7fc85aaa37e2"). InnerVolumeSpecName "kube-api-access-gwsjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.152141 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwsjj\" (UniqueName: \"kubernetes.io/projected/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2-kube-api-access-gwsjj\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.505939 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.554987 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-9607-account-create-update-p8nxg"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.554982 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-9607-account-create-update-p8nxg" event={"ID":"9c3f37ef-831b-4a8c-a453-7fc85aaa37e2","Type":"ContainerDied","Data":"f0f9b4f283d83fba0aaf3efa913ad4e2befea3901cad6c97a080f28222a0794f"}
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.555100 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0f9b4f283d83fba0aaf3efa913ad4e2befea3901cad6c97a080f28222a0794f"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559078 4652 generic.go:334] "Generic (PLEG): container finished" podID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerID="dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672" exitCode=0
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559103 4652 generic.go:334] "Generic (PLEG): container finished" podID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerID="5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47" exitCode=0
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559112 4652 generic.go:334] "Generic (PLEG): container finished" podID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerID="60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531" exitCode=0
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559147 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559152 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerDied","Data":"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672"}
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559180 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerDied","Data":"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47"}
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559192 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerDied","Data":"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531"}
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559201 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dfd617e6-2edb-4ed2-9b66-fd8893ae9427","Type":"ContainerDied","Data":"5162d422e5479ab753bd4a69cdb9f1365355e355d1ef413f98fc496785ffe3e2"}
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.559217 4652 scope.go:117] "RemoveContainer" containerID="dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.560112 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7425818b-e1fe-4aab-b8db-5c071afb7c9e","Type":"ContainerStarted","Data":"d44508d3dbb50fd55e2e9162074864fcc4eb0fd2335cfdcbea746d4aaf1c59e2"}
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.575927 4652 scope.go:117] "RemoveContainer" containerID="5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.594664 4652 scope.go:117] "RemoveContainer" containerID="60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.617026 4652 scope.go:117] "RemoveContainer" containerID="4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.660439 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-web-config\") pod \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") "
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.660538 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6pxg\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-kube-api-access-n6pxg\") pod \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") "
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.660616 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config-out\") pod \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") "
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.660690 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-tls-assets\") pod \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") "
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.660861 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") "
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.660894 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-thanos-prometheus-http-client-file\") pod \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") "
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.660921 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-prometheus-metric-storage-rulefiles-0\") pod \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") "
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.661061 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config\") pod \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\" (UID: \"dfd617e6-2edb-4ed2-9b66-fd8893ae9427\") "
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.661545 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "dfd617e6-2edb-4ed2-9b66-fd8893ae9427" (UID: "dfd617e6-2edb-4ed2-9b66-fd8893ae9427"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.665350 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "dfd617e6-2edb-4ed2-9b66-fd8893ae9427" (UID: "dfd617e6-2edb-4ed2-9b66-fd8893ae9427"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.665724 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "dfd617e6-2edb-4ed2-9b66-fd8893ae9427" (UID: "dfd617e6-2edb-4ed2-9b66-fd8893ae9427"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.665689 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config" (OuterVolumeSpecName: "config") pod "dfd617e6-2edb-4ed2-9b66-fd8893ae9427" (UID: "dfd617e6-2edb-4ed2-9b66-fd8893ae9427"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.665787 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config-out" (OuterVolumeSpecName: "config-out") pod "dfd617e6-2edb-4ed2-9b66-fd8893ae9427" (UID: "dfd617e6-2edb-4ed2-9b66-fd8893ae9427"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.665934 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-kube-api-access-n6pxg" (OuterVolumeSpecName: "kube-api-access-n6pxg") pod "dfd617e6-2edb-4ed2-9b66-fd8893ae9427" (UID: "dfd617e6-2edb-4ed2-9b66-fd8893ae9427"). InnerVolumeSpecName "kube-api-access-n6pxg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.674108 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "dfd617e6-2edb-4ed2-9b66-fd8893ae9427" (UID: "dfd617e6-2edb-4ed2-9b66-fd8893ae9427"). InnerVolumeSpecName "pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc". PluginName "kubernetes.io/csi", VolumeGidValue ""
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.676632 4652 scope.go:117] "RemoveContainer" containerID="dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672" Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.676965 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672\": container with ID starting with dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672 not found: ID does not exist" containerID="dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.677000 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672"} err="failed to get container status \"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672\": rpc error: code = NotFound desc = could not find container \"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672\": container with ID starting with dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.677023 4652 scope.go:117] "RemoveContainer" containerID="5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47" Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.677296 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47\": container with ID starting with 5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47 not found: ID does not exist" containerID="5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.677324 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47"} err="failed to get container status \"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47\": rpc error: code = NotFound desc = could not find container \"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47\": container with ID starting with 5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.677345 4652 scope.go:117] "RemoveContainer" containerID="60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531" Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.677534 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531\": container with ID starting with 60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531 not found: ID does not exist" containerID="60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.677579 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531"} err="failed to get container status \"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531\": rpc error: code = NotFound desc = could not find 
container \"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531\": container with ID starting with 60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.677602 4652 scope.go:117] "RemoveContainer" containerID="4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b" Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.678253 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b\": container with ID starting with 4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b not found: ID does not exist" containerID="4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.678276 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b"} err="failed to get container status \"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b\": rpc error: code = NotFound desc = could not find container \"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b\": container with ID starting with 4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.678288 4652 scope.go:117] "RemoveContainer" containerID="dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.678517 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672"} err="failed to get container status \"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672\": rpc error: code = NotFound desc = could not find container \"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672\": container with ID starting with dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.678549 4652 scope.go:117] "RemoveContainer" containerID="5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.678867 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47"} err="failed to get container status \"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47\": rpc error: code = NotFound desc = could not find container \"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47\": container with ID starting with 5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.678886 4652 scope.go:117] "RemoveContainer" containerID="60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.679085 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531"} err="failed to get container status \"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531\": rpc error: code = NotFound desc = could not find 
container \"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531\": container with ID starting with 60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.679154 4652 scope.go:117] "RemoveContainer" containerID="4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.679374 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b"} err="failed to get container status \"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b\": rpc error: code = NotFound desc = could not find container \"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b\": container with ID starting with 4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.679395 4652 scope.go:117] "RemoveContainer" containerID="dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.679663 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672"} err="failed to get container status \"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672\": rpc error: code = NotFound desc = could not find container \"dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672\": container with ID starting with dc45be9416a2c9bfd50ac0f83312d692925d1cb86e0134d9b29afb86e8bcc672 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.679744 4652 scope.go:117] "RemoveContainer" containerID="5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.679962 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47"} err="failed to get container status \"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47\": rpc error: code = NotFound desc = could not find container \"5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47\": container with ID starting with 5f5a9f28aee62831f2e64547f5b922846c9f50d0f8ac836cb166103856cefc47 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.679984 4652 scope.go:117] "RemoveContainer" containerID="60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.680199 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531"} err="failed to get container status \"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531\": rpc error: code = NotFound desc = could not find container \"60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531\": container with ID starting with 60983b250a783a686199f105b771eb13ace74ddd447c06587e6b1a7200cb5531 not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.680283 4652 scope.go:117] "RemoveContainer" containerID="4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.680545 4652 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b"} err="failed to get container status \"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b\": rpc error: code = NotFound desc = could not find container \"4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b\": container with ID starting with 4a73572f3c0bde5a9bef9d031f0718ccddb26ace966cb56b2fc6dd138c6abf2b not found: ID does not exist" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.681632 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-web-config" (OuterVolumeSpecName: "web-config") pod "dfd617e6-2edb-4ed2-9b66-fd8893ae9427" (UID: "dfd617e6-2edb-4ed2-9b66-fd8893ae9427"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.763700 4652 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-web-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.763755 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6pxg\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-kube-api-access-n6pxg\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.763767 4652 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config-out\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.763775 4652 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.763828 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") on node \"crc\" " Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.763871 4652 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.763882 4652 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.763890 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfd617e6-2edb-4ed2-9b66-fd8893ae9427-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.787548 4652 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.787923 4652 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc") on node "crc"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.869697 4652 reconciler_common.go:293] "Volume detached for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") on node \"crc\" DevicePath \"\""
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.884488 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.889945 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899397 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.899682 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="config-reloader"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899708 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="config-reloader"
Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.899735 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="prometheus"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899741 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="prometheus"
Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.899749 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c3f37ef-831b-4a8c-a453-7fc85aaa37e2" containerName="mariadb-account-create-update"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899754 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c3f37ef-831b-4a8c-a453-7fc85aaa37e2" containerName="mariadb-account-create-update"
Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.899765 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="init-config-reloader"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899770 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="init-config-reloader"
Dec 05 05:43:17 crc kubenswrapper[4652]: E1205 05:43:17.899779 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="thanos-sidecar"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899817 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="thanos-sidecar"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899958 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="thanos-sidecar"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899978 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="prometheus"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.899987 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c3f37ef-831b-4a8c-a453-7fc85aaa37e2" containerName="mariadb-account-create-update"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.900001 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" containerName="config-reloader"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.901249 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.903183 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.903289 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-bj7vd"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.903320 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.903294 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.903340 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.903376 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.907495 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 05 05:43:17 crc kubenswrapper[4652]: I1205 05:43:17.928078 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072115 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2bbbfb38-753e-463f-821f-1a98b2d68d38-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072151 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072185 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072221 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072238 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072268 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbqbs\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-kube-api-access-wbqbs\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072280 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072311 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072339 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2bbbfb38-753e-463f-821f-1a98b2d68d38-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072366 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-config\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.072396 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.132410 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfd617e6-2edb-4ed2-9b66-fd8893ae9427" path="/var/lib/kubelet/pods/dfd617e6-2edb-4ed2-9b66-fd8893ae9427/volumes"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.173808 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.173858 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.173876 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.173907 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbqbs\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-kube-api-access-wbqbs\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.173924 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.173954 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.173985 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2bbbfb38-753e-463f-821f-1a98b2d68d38-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.174013 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-config\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.174039 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0"
\"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.174082 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2bbbfb38-753e-463f-821f-1a98b2d68d38-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.174102 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.176002 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2bbbfb38-753e-463f-821f-1a98b2d68d38-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.177953 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.178070 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.178451 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.178597 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.179012 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 
05:43:18.179601 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2bbbfb38-753e-463f-821f-1a98b2d68d38-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.179856 4652 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.179881 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0495d50add23caafd61e72bca8d5e7274e8f1a3737d1b608160f79f869a86c50/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.180000 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.180050 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-config\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.191099 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbqbs\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-kube-api-access-wbqbs\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.224191 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.518244 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.567385 4652 generic.go:334] "Generic (PLEG): container finished" podID="d882dd0e-1ba2-4b38-a548-9d47833aa687" containerID="25d4fbeaecf92b81f6be9d3267f7c477ee76c28acb464d780195a0a37c9dc0f1" exitCode=0 Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.567444 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hg67q" event={"ID":"d882dd0e-1ba2-4b38-a548-9d47833aa687","Type":"ContainerDied","Data":"25d4fbeaecf92b81f6be9d3267f7c477ee76c28acb464d780195a0a37c9dc0f1"} Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.571358 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7425818b-e1fe-4aab-b8db-5c071afb7c9e","Type":"ContainerStarted","Data":"93eedf9841ab09e8102639bd81924cf1af848460314f17bc9e5ed377e10d8a66"} Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.571381 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"7425818b-e1fe-4aab-b8db-5c071afb7c9e","Type":"ContainerStarted","Data":"9e002079ae70c93f3470c0f234c5308056b0ee5f0c1e49f63300c20c13eedc6a"} Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.571990 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.603535 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.155219963 podStartE2EDuration="3.603520433s" podCreationTimestamp="2025-12-05 05:43:15 +0000 UTC" firstStartedPulling="2025-12-05 05:43:16.572586157 +0000 UTC m=+998.809316424" lastFinishedPulling="2025-12-05 05:43:18.020886627 +0000 UTC m=+1000.257616894" observedRunningTime="2025-12-05 05:43:18.598005995 +0000 UTC m=+1000.834736262" watchObservedRunningTime="2025-12-05 05:43:18.603520433 +0000 UTC m=+1000.840250700" Dec 05 05:43:18 crc kubenswrapper[4652]: I1205 05:43:18.878593 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 05:43:18 crc kubenswrapper[4652]: W1205 05:43:18.879906 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2bbbfb38_753e_463f_821f_1a98b2d68d38.slice/crio-74afa5d80471d42342d2a11ce1fbb3c2fe1f7b6d02cd36b854b73564fd611c00 WatchSource:0}: Error finding container 74afa5d80471d42342d2a11ce1fbb3c2fe1f7b6d02cd36b854b73564fd611c00: Status 404 returned error can't find the container with id 74afa5d80471d42342d2a11ce1fbb3c2fe1f7b6d02cd36b854b73564fd611c00 Dec 05 05:43:19 crc kubenswrapper[4652]: I1205 05:43:19.578114 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerStarted","Data":"74afa5d80471d42342d2a11ce1fbb3c2fe1f7b6d02cd36b854b73564fd611c00"} Dec 05 05:43:19 crc kubenswrapper[4652]: I1205 05:43:19.960394 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.009691 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.015637 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f145cea0-a420-4b52-95bb-83042cd8d09b-etc-swift\") pod \"swift-storage-0\" (UID: \"f145cea0-a420-4b52-95bb-83042cd8d09b\") " pod="openstack/swift-storage-0" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.111212 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-combined-ca-bundle\") pod \"d882dd0e-1ba2-4b38-a548-9d47833aa687\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.111722 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-scripts\") pod \"d882dd0e-1ba2-4b38-a548-9d47833aa687\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.111771 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtmv6\" (UniqueName: \"kubernetes.io/projected/d882dd0e-1ba2-4b38-a548-9d47833aa687-kube-api-access-xtmv6\") pod \"d882dd0e-1ba2-4b38-a548-9d47833aa687\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.111882 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-dispersionconf\") pod \"d882dd0e-1ba2-4b38-a548-9d47833aa687\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.111912 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d882dd0e-1ba2-4b38-a548-9d47833aa687-etc-swift\") pod \"d882dd0e-1ba2-4b38-a548-9d47833aa687\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.111936 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-swiftconf\") pod \"d882dd0e-1ba2-4b38-a548-9d47833aa687\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.111953 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-ring-data-devices\") pod \"d882dd0e-1ba2-4b38-a548-9d47833aa687\" (UID: \"d882dd0e-1ba2-4b38-a548-9d47833aa687\") " Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.112391 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d882dd0e-1ba2-4b38-a548-9d47833aa687" (UID: 
"d882dd0e-1ba2-4b38-a548-9d47833aa687"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.112789 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d882dd0e-1ba2-4b38-a548-9d47833aa687-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d882dd0e-1ba2-4b38-a548-9d47833aa687" (UID: "d882dd0e-1ba2-4b38-a548-9d47833aa687"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.114484 4652 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d882dd0e-1ba2-4b38-a548-9d47833aa687-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.114519 4652 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.116240 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d882dd0e-1ba2-4b38-a548-9d47833aa687-kube-api-access-xtmv6" (OuterVolumeSpecName: "kube-api-access-xtmv6") pod "d882dd0e-1ba2-4b38-a548-9d47833aa687" (UID: "d882dd0e-1ba2-4b38-a548-9d47833aa687"). InnerVolumeSpecName "kube-api-access-xtmv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.119098 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d882dd0e-1ba2-4b38-a548-9d47833aa687" (UID: "d882dd0e-1ba2-4b38-a548-9d47833aa687"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.215729 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtmv6\" (UniqueName: \"kubernetes.io/projected/d882dd0e-1ba2-4b38-a548-9d47833aa687-kube-api-access-xtmv6\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.215758 4652 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.285771 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.317292 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-scripts" (OuterVolumeSpecName: "scripts") pod "d882dd0e-1ba2-4b38-a548-9d47833aa687" (UID: "d882dd0e-1ba2-4b38-a548-9d47833aa687"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.317463 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d882dd0e-1ba2-4b38-a548-9d47833aa687-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.517293 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d882dd0e-1ba2-4b38-a548-9d47833aa687" (UID: "d882dd0e-1ba2-4b38-a548-9d47833aa687"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.518046 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d882dd0e-1ba2-4b38-a548-9d47833aa687" (UID: "d882dd0e-1ba2-4b38-a548-9d47833aa687"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.521158 4652 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.521182 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d882dd0e-1ba2-4b38-a548-9d47833aa687-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.585319 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-hg67q" Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.585310 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hg67q" event={"ID":"d882dd0e-1ba2-4b38-a548-9d47833aa687","Type":"ContainerDied","Data":"febc9095d2b12a99eb06c50ceb4688b521147c21f8f88ae14a475a4515842c04"} Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.586094 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="febc9095d2b12a99eb06c50ceb4688b521147c21f8f88ae14a475a4515842c04" Dec 05 05:43:20 crc kubenswrapper[4652]: W1205 05:43:20.706815 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf145cea0_a420_4b52_95bb_83042cd8d09b.slice/crio-ec8ea7f75bda5656b128cd2173b91d201819c0cdc094c969dfc4d8b67471f356 WatchSource:0}: Error finding container ec8ea7f75bda5656b128cd2173b91d201819c0cdc094c969dfc4d8b67471f356: Status 404 returned error can't find the container with id ec8ea7f75bda5656b128cd2173b91d201819c0cdc094c969dfc4d8b67471f356 Dec 05 05:43:20 crc kubenswrapper[4652]: I1205 05:43:20.706918 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 05:43:21 crc kubenswrapper[4652]: I1205 05:43:21.594276 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerStarted","Data":"59e3b1f810ecb3612661b9ec5e1805957f6eebe1133c994ef57c7c832a10c0e7"} Dec 05 05:43:21 crc kubenswrapper[4652]: I1205 05:43:21.595953 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"ec8ea7f75bda5656b128cd2173b91d201819c0cdc094c969dfc4d8b67471f356"} Dec 05 05:43:22 crc kubenswrapper[4652]: I1205 05:43:22.602767 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"ef689ce185c4c9127178193dc45d610188961c9b52851ccca9a953dd62082175"} Dec 05 05:43:22 crc kubenswrapper[4652]: I1205 05:43:22.603001 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"4bccf3cabd067d9a8b81f1daca2dde4ae632b878612cc83a15099fc12720e078"} Dec 05 05:43:22 crc kubenswrapper[4652]: I1205 05:43:22.603012 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"fc93da23e80689dfd4b0a5b070fd643a2e64b4e9276435436292e57cdcaa6a7c"} Dec 05 05:43:22 crc kubenswrapper[4652]: I1205 05:43:22.603020 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"d825bb9e25296338f16e67a32994890a69434d83ee5a1a634cbbeac5212d0969"} Dec 05 05:43:24 crc kubenswrapper[4652]: I1205 05:43:24.617453 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"b090e0e45c1ff463be97294b5169d67ee7e5dba463e1a205c9abbfd49cf8d801"} Dec 05 05:43:24 crc kubenswrapper[4652]: I1205 05:43:24.617678 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"768ffe168c7b3cb8fe81041fcb80047f2362edf72d33b1434c46d0f56b8e99c1"} Dec 05 05:43:24 crc kubenswrapper[4652]: I1205 05:43:24.617688 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"a56bfba742db090bf7fd0435e5a75ade6e9dd12f7264cae750bec08868137482"} Dec 05 05:43:24 crc kubenswrapper[4652]: I1205 05:43:24.617708 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"d9106f7687675c486e0286556db81825de4a75c3fc61ef8f933aba2d70ca5292"} Dec 05 05:43:25 crc kubenswrapper[4652]: I1205 05:43:25.624377 4652 generic.go:334] "Generic (PLEG): container finished" podID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerID="59e3b1f810ecb3612661b9ec5e1805957f6eebe1133c994ef57c7c832a10c0e7" exitCode=0 Dec 05 05:43:25 crc kubenswrapper[4652]: I1205 05:43:25.624453 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerDied","Data":"59e3b1f810ecb3612661b9ec5e1805957f6eebe1133c994ef57c7c832a10c0e7"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.634334 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerStarted","Data":"42c8e400642241f60709f60f577a3079ef4abcfdf09fb72e32dbb3be5eecfe5c"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.635678 4652 generic.go:334] "Generic (PLEG): container finished" podID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerID="8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594" exitCode=0 Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.635721 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f","Type":"ContainerDied","Data":"8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.682286 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"5382f2cd331236f1847f69dd5d3cb8fd0e0f214536a80992d8bcaa1b9c4a2cc4"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.682324 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"4446edcefec324372a32f1fb34fc3471f52a16b0c1cb9ca5e482b28e593d824f"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.682334 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"e824c49357473d7e556328860fbcd301faadb7b379b24fe30d7f0101e013070f"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.682342 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"98976cf93bb12c74ee3e9bed8c66e136f834dabb3e0b543e46df39fe66f3f8f8"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.682349 4652 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"8edf443bde5d25202aadc2ec544fe52fe619c199d0f43ea9c27186b7a3110010"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.682357 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"aa348f020959eb4fc35f2d9faef3fa9b0ac25f850211a95af83c046558020499"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.682364 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f145cea0-a420-4b52-95bb-83042cd8d09b","Type":"ContainerStarted","Data":"3be3e3fe861189a98d7367d94756ba9058c9c49c4e1434d3eef96d8fda136950"} Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.726432 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=18.858270291 podStartE2EDuration="23.726422619s" podCreationTimestamp="2025-12-05 05:43:03 +0000 UTC" firstStartedPulling="2025-12-05 05:43:20.708570583 +0000 UTC m=+1002.945300850" lastFinishedPulling="2025-12-05 05:43:25.57672291 +0000 UTC m=+1007.813453178" observedRunningTime="2025-12-05 05:43:26.723528983 +0000 UTC m=+1008.960259251" watchObservedRunningTime="2025-12-05 05:43:26.726422619 +0000 UTC m=+1008.963152887" Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.950104 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-frd2k"] Dec 05 05:43:26 crc kubenswrapper[4652]: E1205 05:43:26.950430 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d882dd0e-1ba2-4b38-a548-9d47833aa687" containerName="swift-ring-rebalance" Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.950448 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d882dd0e-1ba2-4b38-a548-9d47833aa687" containerName="swift-ring-rebalance" Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.950650 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d882dd0e-1ba2-4b38-a548-9d47833aa687" containerName="swift-ring-rebalance" Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.951383 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.952628 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 05 05:43:26 crc kubenswrapper[4652]: I1205 05:43:26.968222 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-frd2k"] Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.110023 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-swift-storage-0\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.110084 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-sb\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.110422 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwnj9\" (UniqueName: \"kubernetes.io/projected/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-kube-api-access-pwnj9\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.110480 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-svc\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.110514 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-config\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.110581 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-nb\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.212121 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-svc\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.212167 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-config\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " 
pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.212205 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-nb\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.212297 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-swift-storage-0\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.212350 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-sb\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.212399 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwnj9\" (UniqueName: \"kubernetes.io/projected/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-kube-api-access-pwnj9\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.212966 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-svc\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.213136 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-nb\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.213169 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-swift-storage-0\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.215369 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-sb\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.215880 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-config\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: 
I1205 05:43:27.225836 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwnj9\" (UniqueName: \"kubernetes.io/projected/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-kube-api-access-pwnj9\") pod \"dnsmasq-dns-55b99bf79c-frd2k\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.410776 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.689565 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f","Type":"ContainerStarted","Data":"40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad"} Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.690041 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.709859 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.253770532 podStartE2EDuration="1m22.709845754s" podCreationTimestamp="2025-12-05 05:42:05 +0000 UTC" firstStartedPulling="2025-12-05 05:42:07.291880005 +0000 UTC m=+929.528610272" lastFinishedPulling="2025-12-05 05:42:53.747955228 +0000 UTC m=+975.984685494" observedRunningTime="2025-12-05 05:43:27.707628428 +0000 UTC m=+1009.944358695" watchObservedRunningTime="2025-12-05 05:43:27.709845754 +0000 UTC m=+1009.946576021" Dec 05 05:43:27 crc kubenswrapper[4652]: I1205 05:43:27.789817 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-frd2k"] Dec 05 05:43:28 crc kubenswrapper[4652]: I1205 05:43:28.697477 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerStarted","Data":"da54de35e0f9f9c2fd78f2aa8e0e8ea246aa354058e1bcc0769b060a37048646"} Dec 05 05:43:28 crc kubenswrapper[4652]: I1205 05:43:28.697722 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerStarted","Data":"7060044012efa9e072cd50a01291408aa3144ecad2fb0032419f049bcb1aa44a"} Dec 05 05:43:28 crc kubenswrapper[4652]: I1205 05:43:28.699406 4652 generic.go:334] "Generic (PLEG): container finished" podID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerID="4e1436f0144c95ea61b05b17005c147a1a8480993df19268e0bffaf48f4b22a2" exitCode=0 Dec 05 05:43:28 crc kubenswrapper[4652]: I1205 05:43:28.699472 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" event={"ID":"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1","Type":"ContainerDied","Data":"4e1436f0144c95ea61b05b17005c147a1a8480993df19268e0bffaf48f4b22a2"} Dec 05 05:43:28 crc kubenswrapper[4652]: I1205 05:43:28.699497 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" event={"ID":"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1","Type":"ContainerStarted","Data":"8008ec6549b9e647282b5035ce3ba8ead51c170eb11d246989ec63b017e94efc"} Dec 05 05:43:28 crc kubenswrapper[4652]: I1205 05:43:28.723745 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=11.72373088 
podStartE2EDuration="11.72373088s" podCreationTimestamp="2025-12-05 05:43:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:43:28.720993208 +0000 UTC m=+1010.957723475" watchObservedRunningTime="2025-12-05 05:43:28.72373088 +0000 UTC m=+1010.960461148" Dec 05 05:43:29 crc kubenswrapper[4652]: I1205 05:43:29.706656 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" event={"ID":"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1","Type":"ContainerStarted","Data":"b5a31b91879b3d21f74f2d2dacf5daed3d54cafc6490c1747146e34c5f65fb0e"} Dec 05 05:43:29 crc kubenswrapper[4652]: I1205 05:43:29.722288 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" podStartSLOduration=3.7222752 podStartE2EDuration="3.7222752s" podCreationTimestamp="2025-12-05 05:43:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:43:29.7189269 +0000 UTC m=+1011.955657167" watchObservedRunningTime="2025-12-05 05:43:29.7222752 +0000 UTC m=+1011.959005466" Dec 05 05:43:30 crc kubenswrapper[4652]: I1205 05:43:30.712316 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:31 crc kubenswrapper[4652]: I1205 05:43:31.212577 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 05:43:33 crc kubenswrapper[4652]: I1205 05:43:33.519276 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:33 crc kubenswrapper[4652]: I1205 05:43:33.519321 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:33 crc kubenswrapper[4652]: I1205 05:43:33.524886 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:33 crc kubenswrapper[4652]: I1205 05:43:33.732941 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 05:43:34 crc kubenswrapper[4652]: I1205 05:43:34.735524 4652 generic.go:334] "Generic (PLEG): container finished" podID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerID="10385fadc1c0181fdfc6ad208b74d8a4b1a93afe7b38437feb6e08a3cf4c076e" exitCode=0 Dec 05 05:43:34 crc kubenswrapper[4652]: I1205 05:43:34.735602 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4aa077b9-9612-44cf-b163-d0c1f9468787","Type":"ContainerDied","Data":"10385fadc1c0181fdfc6ad208b74d8a4b1a93afe7b38437feb6e08a3cf4c076e"} Dec 05 05:43:35 crc kubenswrapper[4652]: I1205 05:43:35.742624 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4aa077b9-9612-44cf-b163-d0c1f9468787","Type":"ContainerStarted","Data":"0e3a5fca10802e00f74a6b2b4ac368814855aef5424c778c1299748257a5e6ec"} Dec 05 05:43:35 crc kubenswrapper[4652]: I1205 05:43:35.742988 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 05:43:35 crc kubenswrapper[4652]: I1205 05:43:35.744264 4652 generic.go:334] "Generic (PLEG): container finished" podID="86f82531-5219-4cd8-9432-1e8dc2a73b08" 
containerID="8f6498aedbc6df7c3fbd7f3863fdb9ca6535da9aece40f8f649c2d061282a171" exitCode=0 Dec 05 05:43:35 crc kubenswrapper[4652]: I1205 05:43:35.744297 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"86f82531-5219-4cd8-9432-1e8dc2a73b08","Type":"ContainerDied","Data":"8f6498aedbc6df7c3fbd7f3863fdb9ca6535da9aece40f8f649c2d061282a171"} Dec 05 05:43:35 crc kubenswrapper[4652]: I1205 05:43:35.767858 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371946.086935 podStartE2EDuration="1m30.767841075s" podCreationTimestamp="2025-12-05 05:42:05 +0000 UTC" firstStartedPulling="2025-12-05 05:42:06.887279331 +0000 UTC m=+929.124009598" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:43:35.76602293 +0000 UTC m=+1018.002753197" watchObservedRunningTime="2025-12-05 05:43:35.767841075 +0000 UTC m=+1018.004571343" Dec 05 05:43:36 crc kubenswrapper[4652]: I1205 05:43:36.753870 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"86f82531-5219-4cd8-9432-1e8dc2a73b08","Type":"ContainerStarted","Data":"b3eb882a1d33d87b81d6d2d28d55d65a1990c36480f5a190dc87257decbad75b"} Dec 05 05:43:36 crc kubenswrapper[4652]: I1205 05:43:36.754268 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:43:36 crc kubenswrapper[4652]: I1205 05:43:36.770199 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-notifications-server-0" podStartSLOduration=-9223371945.084589 podStartE2EDuration="1m31.770185924s" podCreationTimestamp="2025-12-05 05:42:05 +0000 UTC" firstStartedPulling="2025-12-05 05:42:07.486085783 +0000 UTC m=+929.722816049" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:43:36.767890943 +0000 UTC m=+1019.004621210" watchObservedRunningTime="2025-12-05 05:43:36.770185924 +0000 UTC m=+1019.006916190" Dec 05 05:43:36 crc kubenswrapper[4652]: I1205 05:43:36.964265 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:43:36 crc kubenswrapper[4652]: I1205 05:43:36.964704 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-kpg54" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.150214 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9ddrq-config-69rfz"] Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.151179 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.152953 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.160970 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9ddrq-config-69rfz"] Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.247118 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run-ovn\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.247155 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-log-ovn\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.247172 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.247345 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-additional-scripts\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.247502 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gt29\" (UniqueName: \"kubernetes.io/projected/5b497345-d12c-424e-9a3a-6f64260ff215-kube-api-access-7gt29\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.247575 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-scripts\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349356 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-additional-scripts\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349436 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gt29\" (UniqueName: 
\"kubernetes.io/projected/5b497345-d12c-424e-9a3a-6f64260ff215-kube-api-access-7gt29\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349474 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-scripts\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349591 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run-ovn\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349610 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-log-ovn\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349630 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349866 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349876 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run-ovn\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.349895 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-log-ovn\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.350267 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-additional-scripts\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.351462 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-scripts\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.365717 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gt29\" (UniqueName: \"kubernetes.io/projected/5b497345-d12c-424e-9a3a-6f64260ff215-kube-api-access-7gt29\") pod \"ovn-controller-9ddrq-config-69rfz\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.411729 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.450800 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-kmsw2"] Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.450984 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" podUID="330ff405-1db6-4136-b17b-679168d3125b" containerName="dnsmasq-dns" containerID="cri-o://9b7f311a8faac7a447c8090ed4c737822c82df8d6ceddf87d7f17b2b8eb892ed" gracePeriod=10 Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.465952 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.762154 4652 generic.go:334] "Generic (PLEG): container finished" podID="330ff405-1db6-4136-b17b-679168d3125b" containerID="9b7f311a8faac7a447c8090ed4c737822c82df8d6ceddf87d7f17b2b8eb892ed" exitCode=0 Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.762234 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" event={"ID":"330ff405-1db6-4136-b17b-679168d3125b","Type":"ContainerDied","Data":"9b7f311a8faac7a447c8090ed4c737822c82df8d6ceddf87d7f17b2b8eb892ed"} Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.853353 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.890018 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9ddrq-config-69rfz"] Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.960965 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-config\") pod \"330ff405-1db6-4136-b17b-679168d3125b\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.961023 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85vzn\" (UniqueName: \"kubernetes.io/projected/330ff405-1db6-4136-b17b-679168d3125b-kube-api-access-85vzn\") pod \"330ff405-1db6-4136-b17b-679168d3125b\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.961087 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-dns-svc\") pod \"330ff405-1db6-4136-b17b-679168d3125b\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.961110 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-sb\") pod \"330ff405-1db6-4136-b17b-679168d3125b\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.961138 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-nb\") pod \"330ff405-1db6-4136-b17b-679168d3125b\" (UID: \"330ff405-1db6-4136-b17b-679168d3125b\") " Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.965608 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/330ff405-1db6-4136-b17b-679168d3125b-kube-api-access-85vzn" (OuterVolumeSpecName: "kube-api-access-85vzn") pod "330ff405-1db6-4136-b17b-679168d3125b" (UID: "330ff405-1db6-4136-b17b-679168d3125b"). InnerVolumeSpecName "kube-api-access-85vzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.990779 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "330ff405-1db6-4136-b17b-679168d3125b" (UID: "330ff405-1db6-4136-b17b-679168d3125b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.991378 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "330ff405-1db6-4136-b17b-679168d3125b" (UID: "330ff405-1db6-4136-b17b-679168d3125b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.993028 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "330ff405-1db6-4136-b17b-679168d3125b" (UID: "330ff405-1db6-4136-b17b-679168d3125b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:37 crc kubenswrapper[4652]: I1205 05:43:37.994686 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-config" (OuterVolumeSpecName: "config") pod "330ff405-1db6-4136-b17b-679168d3125b" (UID: "330ff405-1db6-4136-b17b-679168d3125b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.063226 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.063383 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85vzn\" (UniqueName: \"kubernetes.io/projected/330ff405-1db6-4136-b17b-679168d3125b-kube-api-access-85vzn\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.063396 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.063405 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.063412 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/330ff405-1db6-4136-b17b-679168d3125b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.776814 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.776807 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76f9c4c8bc-kmsw2" event={"ID":"330ff405-1db6-4136-b17b-679168d3125b","Type":"ContainerDied","Data":"ea2e8f8e68b877ad182f02e2113c5e5e852fe0ba6c88c3029cba5648e329c9ad"} Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.777152 4652 scope.go:117] "RemoveContainer" containerID="9b7f311a8faac7a447c8090ed4c737822c82df8d6ceddf87d7f17b2b8eb892ed" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.778072 4652 generic.go:334] "Generic (PLEG): container finished" podID="5b497345-d12c-424e-9a3a-6f64260ff215" containerID="37af19b6cf54b6cc05cc4edf0f26ba7b4477f1ca076a54faac4627da1be7a68b" exitCode=0 Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.778120 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9ddrq-config-69rfz" event={"ID":"5b497345-d12c-424e-9a3a-6f64260ff215","Type":"ContainerDied","Data":"37af19b6cf54b6cc05cc4edf0f26ba7b4477f1ca076a54faac4627da1be7a68b"} Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.778145 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9ddrq-config-69rfz" event={"ID":"5b497345-d12c-424e-9a3a-6f64260ff215","Type":"ContainerStarted","Data":"b9a64bc521fb1b669b9ec2512d48c51b859d914ad194abe41a4ae83eac416723"} Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.789964 4652 scope.go:117] "RemoveContainer" containerID="8df2150e5095790438c0007af828c4530b07f0b9601d520c9a28c6297a267826" Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.809176 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-kmsw2"] Dec 05 05:43:38 crc kubenswrapper[4652]: I1205 05:43:38.814433 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76f9c4c8bc-kmsw2"] Dec 05 05:43:39 crc kubenswrapper[4652]: E1205 05:43:39.136199 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330ff405_1db6_4136_b17b_679168d3125b.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.002524 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098139 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-log-ovn\") pod \"5b497345-d12c-424e-9a3a-6f64260ff215\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098241 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "5b497345-d12c-424e-9a3a-6f64260ff215" (UID: "5b497345-d12c-424e-9a3a-6f64260ff215"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098283 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-scripts\") pod \"5b497345-d12c-424e-9a3a-6f64260ff215\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098313 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run\") pod \"5b497345-d12c-424e-9a3a-6f64260ff215\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098354 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gt29\" (UniqueName: \"kubernetes.io/projected/5b497345-d12c-424e-9a3a-6f64260ff215-kube-api-access-7gt29\") pod \"5b497345-d12c-424e-9a3a-6f64260ff215\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098394 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run" (OuterVolumeSpecName: "var-run") pod "5b497345-d12c-424e-9a3a-6f64260ff215" (UID: "5b497345-d12c-424e-9a3a-6f64260ff215"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098417 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run-ovn\") pod \"5b497345-d12c-424e-9a3a-6f64260ff215\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098462 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-additional-scripts\") pod \"5b497345-d12c-424e-9a3a-6f64260ff215\" (UID: \"5b497345-d12c-424e-9a3a-6f64260ff215\") " Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098868 4652 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.098886 4652 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.099081 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "5b497345-d12c-424e-9a3a-6f64260ff215" (UID: "5b497345-d12c-424e-9a3a-6f64260ff215"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.099140 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "5b497345-d12c-424e-9a3a-6f64260ff215" (UID: "5b497345-d12c-424e-9a3a-6f64260ff215"). 
InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.099248 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-scripts" (OuterVolumeSpecName: "scripts") pod "5b497345-d12c-424e-9a3a-6f64260ff215" (UID: "5b497345-d12c-424e-9a3a-6f64260ff215"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.102948 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b497345-d12c-424e-9a3a-6f64260ff215-kube-api-access-7gt29" (OuterVolumeSpecName: "kube-api-access-7gt29") pod "5b497345-d12c-424e-9a3a-6f64260ff215" (UID: "5b497345-d12c-424e-9a3a-6f64260ff215"). InnerVolumeSpecName "kube-api-access-7gt29". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.132654 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="330ff405-1db6-4136-b17b-679168d3125b" path="/var/lib/kubelet/pods/330ff405-1db6-4136-b17b-679168d3125b/volumes" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.199817 4652 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5b497345-d12c-424e-9a3a-6f64260ff215-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.199840 4652 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.199851 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b497345-d12c-424e-9a3a-6f64260ff215-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.199859 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gt29\" (UniqueName: \"kubernetes.io/projected/5b497345-d12c-424e-9a3a-6f64260ff215-kube-api-access-7gt29\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.791682 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9ddrq-config-69rfz" event={"ID":"5b497345-d12c-424e-9a3a-6f64260ff215","Type":"ContainerDied","Data":"b9a64bc521fb1b669b9ec2512d48c51b859d914ad194abe41a4ae83eac416723"} Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.791718 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9a64bc521fb1b669b9ec2512d48c51b859d914ad194abe41a4ae83eac416723" Dec 05 05:43:40 crc kubenswrapper[4652]: I1205 05:43:40.791722 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9ddrq-config-69rfz" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.071389 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-9ddrq-config-69rfz"] Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.075593 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-9ddrq-config-69rfz"] Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.169418 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9ddrq-config-jnc62"] Dec 05 05:43:41 crc kubenswrapper[4652]: E1205 05:43:41.169700 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="330ff405-1db6-4136-b17b-679168d3125b" containerName="init" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.169718 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="330ff405-1db6-4136-b17b-679168d3125b" containerName="init" Dec 05 05:43:41 crc kubenswrapper[4652]: E1205 05:43:41.169733 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b497345-d12c-424e-9a3a-6f64260ff215" containerName="ovn-config" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.169739 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b497345-d12c-424e-9a3a-6f64260ff215" containerName="ovn-config" Dec 05 05:43:41 crc kubenswrapper[4652]: E1205 05:43:41.169766 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="330ff405-1db6-4136-b17b-679168d3125b" containerName="dnsmasq-dns" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.169771 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="330ff405-1db6-4136-b17b-679168d3125b" containerName="dnsmasq-dns" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.169899 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="330ff405-1db6-4136-b17b-679168d3125b" containerName="dnsmasq-dns" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.169916 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b497345-d12c-424e-9a3a-6f64260ff215" containerName="ovn-config" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.170375 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.173039 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.184426 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9ddrq-config-jnc62"] Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.316511 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-scripts\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.316676 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run-ovn\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.316760 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7nh4\" (UniqueName: \"kubernetes.io/projected/7153a716-16fc-49fe-804c-1a1f8e248328-kube-api-access-q7nh4\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.316805 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-log-ovn\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.316853 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-additional-scripts\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.316936 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.418604 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.418733 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-scripts\") pod 
\"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.418803 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run-ovn\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.418885 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7nh4\" (UniqueName: \"kubernetes.io/projected/7153a716-16fc-49fe-804c-1a1f8e248328-kube-api-access-q7nh4\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.418921 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-log-ovn\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.418962 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-additional-scripts\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.418973 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.418986 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run-ovn\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.419153 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-log-ovn\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.419601 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-additional-scripts\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.420574 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-scripts\") pod 
\"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.439009 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7nh4\" (UniqueName: \"kubernetes.io/projected/7153a716-16fc-49fe-804c-1a1f8e248328-kube-api-access-q7nh4\") pod \"ovn-controller-9ddrq-config-jnc62\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.485512 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.866213 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9ddrq-config-jnc62"] Dec 05 05:43:41 crc kubenswrapper[4652]: W1205 05:43:41.869958 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7153a716_16fc_49fe_804c_1a1f8e248328.slice/crio-bcef23b29915e3807f70e46480449a323ed987907abbce5062b2a79a9ac99bea WatchSource:0}: Error finding container bcef23b29915e3807f70e46480449a323ed987907abbce5062b2a79a9ac99bea: Status 404 returned error can't find the container with id bcef23b29915e3807f70e46480449a323ed987907abbce5062b2a79a9ac99bea Dec 05 05:43:41 crc kubenswrapper[4652]: I1205 05:43:41.944783 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-9ddrq" Dec 05 05:43:42 crc kubenswrapper[4652]: I1205 05:43:42.135148 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b497345-d12c-424e-9a3a-6f64260ff215" path="/var/lib/kubelet/pods/5b497345-d12c-424e-9a3a-6f64260ff215/volumes" Dec 05 05:43:42 crc kubenswrapper[4652]: I1205 05:43:42.806625 4652 generic.go:334] "Generic (PLEG): container finished" podID="7153a716-16fc-49fe-804c-1a1f8e248328" containerID="be42c2b587bcb6fc43f29d4a1645290a25b8152eb4eb887eb9bb13d9e31a7e42" exitCode=0 Dec 05 05:43:42 crc kubenswrapper[4652]: I1205 05:43:42.806671 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9ddrq-config-jnc62" event={"ID":"7153a716-16fc-49fe-804c-1a1f8e248328","Type":"ContainerDied","Data":"be42c2b587bcb6fc43f29d4a1645290a25b8152eb4eb887eb9bb13d9e31a7e42"} Dec 05 05:43:42 crc kubenswrapper[4652]: I1205 05:43:42.806695 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9ddrq-config-jnc62" event={"ID":"7153a716-16fc-49fe-804c-1a1f8e248328","Type":"ContainerStarted","Data":"bcef23b29915e3807f70e46480449a323ed987907abbce5062b2a79a9ac99bea"} Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.087329 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174306 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run\") pod \"7153a716-16fc-49fe-804c-1a1f8e248328\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174396 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run" (OuterVolumeSpecName: "var-run") pod "7153a716-16fc-49fe-804c-1a1f8e248328" (UID: "7153a716-16fc-49fe-804c-1a1f8e248328"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174416 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-scripts\") pod \"7153a716-16fc-49fe-804c-1a1f8e248328\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174443 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-log-ovn\") pod \"7153a716-16fc-49fe-804c-1a1f8e248328\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174523 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-additional-scripts\") pod \"7153a716-16fc-49fe-804c-1a1f8e248328\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174576 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run-ovn\") pod \"7153a716-16fc-49fe-804c-1a1f8e248328\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174589 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "7153a716-16fc-49fe-804c-1a1f8e248328" (UID: "7153a716-16fc-49fe-804c-1a1f8e248328"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174683 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7nh4\" (UniqueName: \"kubernetes.io/projected/7153a716-16fc-49fe-804c-1a1f8e248328-kube-api-access-q7nh4\") pod \"7153a716-16fc-49fe-804c-1a1f8e248328\" (UID: \"7153a716-16fc-49fe-804c-1a1f8e248328\") " Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.174698 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "7153a716-16fc-49fe-804c-1a1f8e248328" (UID: "7153a716-16fc-49fe-804c-1a1f8e248328"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.175158 4652 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.175175 4652 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.175185 4652 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7153a716-16fc-49fe-804c-1a1f8e248328-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.175177 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "7153a716-16fc-49fe-804c-1a1f8e248328" (UID: "7153a716-16fc-49fe-804c-1a1f8e248328"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.175314 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-scripts" (OuterVolumeSpecName: "scripts") pod "7153a716-16fc-49fe-804c-1a1f8e248328" (UID: "7153a716-16fc-49fe-804c-1a1f8e248328"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.179804 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7153a716-16fc-49fe-804c-1a1f8e248328-kube-api-access-q7nh4" (OuterVolumeSpecName: "kube-api-access-q7nh4") pod "7153a716-16fc-49fe-804c-1a1f8e248328" (UID: "7153a716-16fc-49fe-804c-1a1f8e248328"). InnerVolumeSpecName "kube-api-access-q7nh4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.276219 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7nh4\" (UniqueName: \"kubernetes.io/projected/7153a716-16fc-49fe-804c-1a1f8e248328-kube-api-access-q7nh4\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.276247 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.276257 4652 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7153a716-16fc-49fe-804c-1a1f8e248328-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.823811 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9ddrq-config-jnc62" event={"ID":"7153a716-16fc-49fe-804c-1a1f8e248328","Type":"ContainerDied","Data":"bcef23b29915e3807f70e46480449a323ed987907abbce5062b2a79a9ac99bea"} Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.824133 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bcef23b29915e3807f70e46480449a323ed987907abbce5062b2a79a9ac99bea" Dec 05 05:43:44 crc kubenswrapper[4652]: I1205 05:43:44.823858 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9ddrq-config-jnc62" Dec 05 05:43:45 crc kubenswrapper[4652]: I1205 05:43:45.140752 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-9ddrq-config-jnc62"] Dec 05 05:43:45 crc kubenswrapper[4652]: I1205 05:43:45.146708 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-9ddrq-config-jnc62"] Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.132710 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7153a716-16fc-49fe-804c-1a1f8e248328" path="/var/lib/kubelet/pods/7153a716-16fc-49fe-804c-1a1f8e248328/volumes" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.557742 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.780107 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-6rksq"] Dec 05 05:43:46 crc kubenswrapper[4652]: E1205 05:43:46.780436 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7153a716-16fc-49fe-804c-1a1f8e248328" containerName="ovn-config" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.780456 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="7153a716-16fc-49fe-804c-1a1f8e248328" containerName="ovn-config" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.780671 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="7153a716-16fc-49fe-804c-1a1f8e248328" containerName="ovn-config" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.781189 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.787565 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6rksq"] Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.814387 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.903486 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-6p9w6"] Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.904590 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.914362 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-operator-scripts\") pod \"barbican-db-create-6p9w6\" (UID: \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\") " pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.914430 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh7zm\" (UniqueName: \"kubernetes.io/projected/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-kube-api-access-fh7zm\") pod \"barbican-db-create-6p9w6\" (UID: \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\") " pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.914457 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f27cfa6f-024e-4274-9788-11b4d959f23b-operator-scripts\") pod \"cinder-db-create-6rksq\" (UID: \"f27cfa6f-024e-4274-9788-11b4d959f23b\") " pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.914477 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv7jd\" (UniqueName: \"kubernetes.io/projected/f27cfa6f-024e-4274-9788-11b4d959f23b-kube-api-access-dv7jd\") pod \"cinder-db-create-6rksq\" (UID: \"f27cfa6f-024e-4274-9788-11b4d959f23b\") " pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.924754 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1b24-account-create-update-9xqkv"] Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.925916 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.930924 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6p9w6"] Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.932997 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.933093 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1b24-account-create-update-9xqkv"] Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.990132 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-d6b4-account-create-update-m556k"] Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.991171 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:46 crc kubenswrapper[4652]: I1205 05:43:46.993910 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.000135 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-d6b4-account-create-update-m556k"] Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.015427 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-operator-scripts\") pod \"barbican-db-create-6p9w6\" (UID: \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\") " pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.015516 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh7zm\" (UniqueName: \"kubernetes.io/projected/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-kube-api-access-fh7zm\") pod \"barbican-db-create-6p9w6\" (UID: \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\") " pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.015570 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f27cfa6f-024e-4274-9788-11b4d959f23b-operator-scripts\") pod \"cinder-db-create-6rksq\" (UID: \"f27cfa6f-024e-4274-9788-11b4d959f23b\") " pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.015598 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv7jd\" (UniqueName: \"kubernetes.io/projected/f27cfa6f-024e-4274-9788-11b4d959f23b-kube-api-access-dv7jd\") pod \"cinder-db-create-6rksq\" (UID: \"f27cfa6f-024e-4274-9788-11b4d959f23b\") " pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.016954 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f27cfa6f-024e-4274-9788-11b4d959f23b-operator-scripts\") pod \"cinder-db-create-6rksq\" (UID: \"f27cfa6f-024e-4274-9788-11b4d959f23b\") " pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.016965 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-operator-scripts\") pod \"barbican-db-create-6p9w6\" (UID: \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\") " pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.036153 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv7jd\" (UniqueName: \"kubernetes.io/projected/f27cfa6f-024e-4274-9788-11b4d959f23b-kube-api-access-dv7jd\") pod \"cinder-db-create-6rksq\" (UID: \"f27cfa6f-024e-4274-9788-11b4d959f23b\") " pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.036857 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh7zm\" (UniqueName: \"kubernetes.io/projected/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-kube-api-access-fh7zm\") pod \"barbican-db-create-6p9w6\" (UID: \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\") " pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 
05:43:47.073633 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-notifications-server-0" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.106134 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.121683 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-operator-scripts\") pod \"barbican-d6b4-account-create-update-m556k\" (UID: \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\") " pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.121750 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md8fk\" (UniqueName: \"kubernetes.io/projected/e96d2f64-5044-4f9a-908d-1f31671b7ee5-kube-api-access-md8fk\") pod \"cinder-1b24-account-create-update-9xqkv\" (UID: \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\") " pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.121773 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e96d2f64-5044-4f9a-908d-1f31671b7ee5-operator-scripts\") pod \"cinder-1b24-account-create-update-9xqkv\" (UID: \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\") " pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.121833 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf6t7\" (UniqueName: \"kubernetes.io/projected/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-kube-api-access-cf6t7\") pod \"barbican-d6b4-account-create-update-m556k\" (UID: \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\") " pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.224107 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-operator-scripts\") pod \"barbican-d6b4-account-create-update-m556k\" (UID: \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\") " pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.224256 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md8fk\" (UniqueName: \"kubernetes.io/projected/e96d2f64-5044-4f9a-908d-1f31671b7ee5-kube-api-access-md8fk\") pod \"cinder-1b24-account-create-update-9xqkv\" (UID: \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\") " pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.224282 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e96d2f64-5044-4f9a-908d-1f31671b7ee5-operator-scripts\") pod \"cinder-1b24-account-create-update-9xqkv\" (UID: \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\") " pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.224477 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf6t7\" (UniqueName: 
\"kubernetes.io/projected/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-kube-api-access-cf6t7\") pod \"barbican-d6b4-account-create-update-m556k\" (UID: \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\") " pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.224780 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-operator-scripts\") pod \"barbican-d6b4-account-create-update-m556k\" (UID: \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\") " pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.224887 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e96d2f64-5044-4f9a-908d-1f31671b7ee5-operator-scripts\") pod \"cinder-1b24-account-create-update-9xqkv\" (UID: \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\") " pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.240614 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.241491 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-qqgfk"] Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.243570 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.262931 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.263094 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.263205 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-h2hbv" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.263265 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md8fk\" (UniqueName: \"kubernetes.io/projected/e96d2f64-5044-4f9a-908d-1f31671b7ee5-kube-api-access-md8fk\") pod \"cinder-1b24-account-create-update-9xqkv\" (UID: \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\") " pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.263381 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.269375 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qqgfk"] Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.295821 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf6t7\" (UniqueName: \"kubernetes.io/projected/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-kube-api-access-cf6t7\") pod \"barbican-d6b4-account-create-update-m556k\" (UID: \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\") " pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.318863 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.326053 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-combined-ca-bundle\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.326262 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-config-data\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.326309 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hf7g\" (UniqueName: \"kubernetes.io/projected/e26f824e-a877-4436-bca3-8ecdb1d1a73c-kube-api-access-4hf7g\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.428414 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-combined-ca-bundle\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.428562 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-config-data\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.428600 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hf7g\" (UniqueName: \"kubernetes.io/projected/e26f824e-a877-4436-bca3-8ecdb1d1a73c-kube-api-access-4hf7g\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.432327 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-combined-ca-bundle\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.433201 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-config-data\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.455766 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hf7g\" (UniqueName: \"kubernetes.io/projected/e26f824e-a877-4436-bca3-8ecdb1d1a73c-kube-api-access-4hf7g\") pod \"keystone-db-sync-qqgfk\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 
crc kubenswrapper[4652]: I1205 05:43:47.545033 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.595411 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.757087 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6rksq"] Dec 05 05:43:47 crc kubenswrapper[4652]: W1205 05:43:47.762332 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf27cfa6f_024e_4274_9788_11b4d959f23b.slice/crio-35c2dfe6aa4853d4c99cebb4585d901717ad6c84ba1d37679bfd3ad0f33c002b WatchSource:0}: Error finding container 35c2dfe6aa4853d4c99cebb4585d901717ad6c84ba1d37679bfd3ad0f33c002b: Status 404 returned error can't find the container with id 35c2dfe6aa4853d4c99cebb4585d901717ad6c84ba1d37679bfd3ad0f33c002b Dec 05 05:43:47 crc kubenswrapper[4652]: W1205 05:43:47.846835 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod296a1c86_330b_46f9_9ae3_53b42c2e6cb8.slice/crio-8fde9c6855bcd1f1ecb38ffb4297f62af388a206aa98a0d74cfd600c674fc84f WatchSource:0}: Error finding container 8fde9c6855bcd1f1ecb38ffb4297f62af388a206aa98a0d74cfd600c674fc84f: Status 404 returned error can't find the container with id 8fde9c6855bcd1f1ecb38ffb4297f62af388a206aa98a0d74cfd600c674fc84f Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.848333 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-d6b4-account-create-update-m556k"] Dec 05 05:43:47 crc kubenswrapper[4652]: W1205 05:43:47.862896 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod875f6fb5_17e4_4ebf_bb40_7cecca7662ae.slice/crio-3e279a2fe51324065a6491738e0b19cf45eee1b52f0569f57632d5153b94da49 WatchSource:0}: Error finding container 3e279a2fe51324065a6491738e0b19cf45eee1b52f0569f57632d5153b94da49: Status 404 returned error can't find the container with id 3e279a2fe51324065a6491738e0b19cf45eee1b52f0569f57632d5153b94da49 Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.863291 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6p9w6"] Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.867938 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6rksq" event={"ID":"f27cfa6f-024e-4274-9788-11b4d959f23b","Type":"ContainerStarted","Data":"35c2dfe6aa4853d4c99cebb4585d901717ad6c84ba1d37679bfd3ad0f33c002b"} Dec 05 05:43:47 crc kubenswrapper[4652]: I1205 05:43:47.981452 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1b24-account-create-update-9xqkv"] Dec 05 05:43:47 crc kubenswrapper[4652]: W1205 05:43:47.983795 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode96d2f64_5044_4f9a_908d_1f31671b7ee5.slice/crio-335f07e12ae1da2a382dfcc523fa0ee29027d31afd1489727c878406a7d39d4a WatchSource:0}: Error finding container 335f07e12ae1da2a382dfcc523fa0ee29027d31afd1489727c878406a7d39d4a: Status 404 returned error can't find the container with id 335f07e12ae1da2a382dfcc523fa0ee29027d31afd1489727c878406a7d39d4a Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 
05:43:48.066847 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qqgfk"] Dec 05 05:43:48 crc kubenswrapper[4652]: W1205 05:43:48.069913 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode26f824e_a877_4436_bca3_8ecdb1d1a73c.slice/crio-de796c25102336e6cc667ffacf800eb6e5768487eeaa28fc522348b33e7e0792 WatchSource:0}: Error finding container de796c25102336e6cc667ffacf800eb6e5768487eeaa28fc522348b33e7e0792: Status 404 returned error can't find the container with id de796c25102336e6cc667ffacf800eb6e5768487eeaa28fc522348b33e7e0792 Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.877200 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qqgfk" event={"ID":"e26f824e-a877-4436-bca3-8ecdb1d1a73c","Type":"ContainerStarted","Data":"de796c25102336e6cc667ffacf800eb6e5768487eeaa28fc522348b33e7e0792"} Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.880613 4652 generic.go:334] "Generic (PLEG): container finished" podID="f27cfa6f-024e-4274-9788-11b4d959f23b" containerID="734da4c19af18d37037b644049892190f897bf593ca0e8704a1a412ecba05d67" exitCode=0 Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.880682 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6rksq" event={"ID":"f27cfa6f-024e-4274-9788-11b4d959f23b","Type":"ContainerDied","Data":"734da4c19af18d37037b644049892190f897bf593ca0e8704a1a412ecba05d67"} Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.888001 4652 generic.go:334] "Generic (PLEG): container finished" podID="875f6fb5-17e4-4ebf-bb40-7cecca7662ae" containerID="e3ed6116a8553d565b9fb4a1ae29030bf059c99a97f7bbe6edce868745878467" exitCode=0 Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.888075 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6p9w6" event={"ID":"875f6fb5-17e4-4ebf-bb40-7cecca7662ae","Type":"ContainerDied","Data":"e3ed6116a8553d565b9fb4a1ae29030bf059c99a97f7bbe6edce868745878467"} Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.888108 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6p9w6" event={"ID":"875f6fb5-17e4-4ebf-bb40-7cecca7662ae","Type":"ContainerStarted","Data":"3e279a2fe51324065a6491738e0b19cf45eee1b52f0569f57632d5153b94da49"} Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.892608 4652 generic.go:334] "Generic (PLEG): container finished" podID="e96d2f64-5044-4f9a-908d-1f31671b7ee5" containerID="96330fc777e64b0833ca0608897222452c2148036c2941fc7d4d3a5924ccbc59" exitCode=0 Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.892790 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1b24-account-create-update-9xqkv" event={"ID":"e96d2f64-5044-4f9a-908d-1f31671b7ee5","Type":"ContainerDied","Data":"96330fc777e64b0833ca0608897222452c2148036c2941fc7d4d3a5924ccbc59"} Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.892848 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1b24-account-create-update-9xqkv" event={"ID":"e96d2f64-5044-4f9a-908d-1f31671b7ee5","Type":"ContainerStarted","Data":"335f07e12ae1da2a382dfcc523fa0ee29027d31afd1489727c878406a7d39d4a"} Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.895875 4652 generic.go:334] "Generic (PLEG): container finished" podID="296a1c86-330b-46f9-9ae3-53b42c2e6cb8" containerID="c42c033d68265a364046ab3458f9e00eecd253cd56c7283ab00c497fbea7581d" 
exitCode=0 Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.895934 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-d6b4-account-create-update-m556k" event={"ID":"296a1c86-330b-46f9-9ae3-53b42c2e6cb8","Type":"ContainerDied","Data":"c42c033d68265a364046ab3458f9e00eecd253cd56c7283ab00c497fbea7581d"} Dec 05 05:43:48 crc kubenswrapper[4652]: I1205 05:43:48.895982 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-d6b4-account-create-update-m556k" event={"ID":"296a1c86-330b-46f9-9ae3-53b42c2e6cb8","Type":"ContainerStarted","Data":"8fde9c6855bcd1f1ecb38ffb4297f62af388a206aa98a0d74cfd600c674fc84f"} Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.008482 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-g4pq9"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.009421 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.015045 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-g4pq9"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.044297 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-kt5bg"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.045229 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.046675 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.053967 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-qz5nm" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.054697 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-kt5bg"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.126492 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-0544-account-create-update-6ht5w"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.127496 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.128666 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.134465 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-0544-account-create-update-6ht5w"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.152390 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-config-data\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.152692 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efdedbbd-400f-4cbe-810b-ee45f74678ce-operator-scripts\") pod \"glance-db-create-g4pq9\" (UID: \"efdedbbd-400f-4cbe-810b-ee45f74678ce\") " pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.152715 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-combined-ca-bundle\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.152759 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-db-sync-config-data\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.152886 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzx2s\" (UniqueName: \"kubernetes.io/projected/efdedbbd-400f-4cbe-810b-ee45f74678ce-kube-api-access-lzx2s\") pod \"glance-db-create-g4pq9\" (UID: \"efdedbbd-400f-4cbe-810b-ee45f74678ce\") " pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.152932 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8lqz\" (UniqueName: \"kubernetes.io/projected/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-kube-api-access-j8lqz\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.215185 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-7n6qs"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.216232 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.223061 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-7n6qs"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.253788 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-db-sync-config-data\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.253869 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzx2s\" (UniqueName: \"kubernetes.io/projected/efdedbbd-400f-4cbe-810b-ee45f74678ce-kube-api-access-lzx2s\") pod \"glance-db-create-g4pq9\" (UID: \"efdedbbd-400f-4cbe-810b-ee45f74678ce\") " pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.253910 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8lqz\" (UniqueName: \"kubernetes.io/projected/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-kube-api-access-j8lqz\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.253936 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48mch\" (UniqueName: \"kubernetes.io/projected/3ce2513b-6436-4167-8796-3769eb3cba5e-kube-api-access-48mch\") pod \"glance-0544-account-create-update-6ht5w\" (UID: \"3ce2513b-6436-4167-8796-3769eb3cba5e\") " pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.253957 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ce2513b-6436-4167-8796-3769eb3cba5e-operator-scripts\") pod \"glance-0544-account-create-update-6ht5w\" (UID: \"3ce2513b-6436-4167-8796-3769eb3cba5e\") " pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.253990 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-config-data\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.254025 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efdedbbd-400f-4cbe-810b-ee45f74678ce-operator-scripts\") pod \"glance-db-create-g4pq9\" (UID: \"efdedbbd-400f-4cbe-810b-ee45f74678ce\") " pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.254039 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-combined-ca-bundle\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.257512 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/efdedbbd-400f-4cbe-810b-ee45f74678ce-operator-scripts\") pod \"glance-db-create-g4pq9\" (UID: \"efdedbbd-400f-4cbe-810b-ee45f74678ce\") " pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.268267 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-combined-ca-bundle\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.269208 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-db-sync-config-data\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.269667 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-config-data\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.270639 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8lqz\" (UniqueName: \"kubernetes.io/projected/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-kube-api-access-j8lqz\") pod \"watcher-db-sync-kt5bg\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.275510 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzx2s\" (UniqueName: \"kubernetes.io/projected/efdedbbd-400f-4cbe-810b-ee45f74678ce-kube-api-access-lzx2s\") pod \"glance-db-create-g4pq9\" (UID: \"efdedbbd-400f-4cbe-810b-ee45f74678ce\") " pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.321701 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.333977 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-3c89-account-create-update-rctff"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.335126 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.340327 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.343096 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3c89-account-create-update-rctff"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.355761 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-operator-scripts\") pod \"neutron-db-create-7n6qs\" (UID: \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\") " pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.355859 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ndtl\" (UniqueName: \"kubernetes.io/projected/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-kube-api-access-5ndtl\") pod \"neutron-db-create-7n6qs\" (UID: \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\") " pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.355984 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48mch\" (UniqueName: \"kubernetes.io/projected/3ce2513b-6436-4167-8796-3769eb3cba5e-kube-api-access-48mch\") pod \"glance-0544-account-create-update-6ht5w\" (UID: \"3ce2513b-6436-4167-8796-3769eb3cba5e\") " pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.356028 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ce2513b-6436-4167-8796-3769eb3cba5e-operator-scripts\") pod \"glance-0544-account-create-update-6ht5w\" (UID: \"3ce2513b-6436-4167-8796-3769eb3cba5e\") " pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.356057 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.357061 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ce2513b-6436-4167-8796-3769eb3cba5e-operator-scripts\") pod \"glance-0544-account-create-update-6ht5w\" (UID: \"3ce2513b-6436-4167-8796-3769eb3cba5e\") " pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.371331 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48mch\" (UniqueName: \"kubernetes.io/projected/3ce2513b-6436-4167-8796-3769eb3cba5e-kube-api-access-48mch\") pod \"glance-0544-account-create-update-6ht5w\" (UID: \"3ce2513b-6436-4167-8796-3769eb3cba5e\") " pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:49 crc kubenswrapper[4652]: E1205 05:43:49.399722 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330ff405_1db6_4136_b17b_679168d3125b.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.440074 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.457830 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-operator-scripts\") pod \"neutron-db-create-7n6qs\" (UID: \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\") " pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.457870 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w9pk\" (UniqueName: \"kubernetes.io/projected/ebe437ed-e42a-41b6-a50c-9678c3807f8c-kube-api-access-5w9pk\") pod \"neutron-3c89-account-create-update-rctff\" (UID: \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\") " pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.457897 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe437ed-e42a-41b6-a50c-9678c3807f8c-operator-scripts\") pod \"neutron-3c89-account-create-update-rctff\" (UID: \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\") " pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.457982 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ndtl\" (UniqueName: \"kubernetes.io/projected/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-kube-api-access-5ndtl\") pod \"neutron-db-create-7n6qs\" (UID: \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\") " pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.458712 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-operator-scripts\") pod \"neutron-db-create-7n6qs\" (UID: \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\") " pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.482444 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ndtl\" (UniqueName: \"kubernetes.io/projected/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-kube-api-access-5ndtl\") pod \"neutron-db-create-7n6qs\" (UID: \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\") " pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.534143 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.560485 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w9pk\" (UniqueName: \"kubernetes.io/projected/ebe437ed-e42a-41b6-a50c-9678c3807f8c-kube-api-access-5w9pk\") pod \"neutron-3c89-account-create-update-rctff\" (UID: \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\") " pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.560526 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe437ed-e42a-41b6-a50c-9678c3807f8c-operator-scripts\") pod \"neutron-3c89-account-create-update-rctff\" (UID: \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\") " pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.561104 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe437ed-e42a-41b6-a50c-9678c3807f8c-operator-scripts\") pod \"neutron-3c89-account-create-update-rctff\" (UID: \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\") " pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.597964 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w9pk\" (UniqueName: \"kubernetes.io/projected/ebe437ed-e42a-41b6-a50c-9678c3807f8c-kube-api-access-5w9pk\") pod \"neutron-3c89-account-create-update-rctff\" (UID: \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\") " pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.661906 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-g4pq9"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.711118 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-kt5bg"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.731220 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.747866 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-0544-account-create-update-6ht5w"] Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.920600 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-kt5bg" event={"ID":"dac210ca-32b5-43af-b85e-4eb7ae57e9d5","Type":"ContainerStarted","Data":"cf2cd05c4bb3dd15df18aca28a24cc9eeea0168944b69929d659790e9d0f9dcc"} Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.924823 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g4pq9" event={"ID":"efdedbbd-400f-4cbe-810b-ee45f74678ce","Type":"ContainerStarted","Data":"cb5be2d2c84f876e7ccc1277ecdf62e96bf1d2f00169d5521a41420418deeb17"} Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.924863 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g4pq9" event={"ID":"efdedbbd-400f-4cbe-810b-ee45f74678ce","Type":"ContainerStarted","Data":"6bd79d00f5b3b24a1ee196735aee636b3046425a4eb78e9e07872483d328587c"} Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.926182 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0544-account-create-update-6ht5w" event={"ID":"3ce2513b-6436-4167-8796-3769eb3cba5e","Type":"ContainerStarted","Data":"349a90b051763cb2c7c69c201f092f397998be98f674135b7d77659291a7a58f"} Dec 05 05:43:49 crc kubenswrapper[4652]: I1205 05:43:49.941907 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-g4pq9" podStartSLOduration=1.941892305 podStartE2EDuration="1.941892305s" podCreationTimestamp="2025-12-05 05:43:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:43:49.940969883 +0000 UTC m=+1032.177700149" watchObservedRunningTime="2025-12-05 05:43:49.941892305 +0000 UTC m=+1032.178622572" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.174507 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-7n6qs"] Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.277210 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-3c89-account-create-update-rctff"] Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.544205 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.562382 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.577832 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.585180 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.687738 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e96d2f64-5044-4f9a-908d-1f31671b7ee5-operator-scripts\") pod \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\" (UID: \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\") " Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.687888 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-operator-scripts\") pod \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\" (UID: \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\") " Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.687919 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-operator-scripts\") pod \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\" (UID: \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\") " Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.687952 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf6t7\" (UniqueName: \"kubernetes.io/projected/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-kube-api-access-cf6t7\") pod \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\" (UID: \"296a1c86-330b-46f9-9ae3-53b42c2e6cb8\") " Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.687974 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md8fk\" (UniqueName: \"kubernetes.io/projected/e96d2f64-5044-4f9a-908d-1f31671b7ee5-kube-api-access-md8fk\") pod \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\" (UID: \"e96d2f64-5044-4f9a-908d-1f31671b7ee5\") " Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.688034 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh7zm\" (UniqueName: \"kubernetes.io/projected/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-kube-api-access-fh7zm\") pod \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\" (UID: \"875f6fb5-17e4-4ebf-bb40-7cecca7662ae\") " Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.688060 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv7jd\" (UniqueName: \"kubernetes.io/projected/f27cfa6f-024e-4274-9788-11b4d959f23b-kube-api-access-dv7jd\") pod \"f27cfa6f-024e-4274-9788-11b4d959f23b\" (UID: \"f27cfa6f-024e-4274-9788-11b4d959f23b\") " Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.688075 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f27cfa6f-024e-4274-9788-11b4d959f23b-operator-scripts\") pod \"f27cfa6f-024e-4274-9788-11b4d959f23b\" (UID: \"f27cfa6f-024e-4274-9788-11b4d959f23b\") " Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.688363 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e96d2f64-5044-4f9a-908d-1f31671b7ee5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e96d2f64-5044-4f9a-908d-1f31671b7ee5" (UID: "e96d2f64-5044-4f9a-908d-1f31671b7ee5"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.688411 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "875f6fb5-17e4-4ebf-bb40-7cecca7662ae" (UID: "875f6fb5-17e4-4ebf-bb40-7cecca7662ae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.688748 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f27cfa6f-024e-4274-9788-11b4d959f23b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f27cfa6f-024e-4274-9788-11b4d959f23b" (UID: "f27cfa6f-024e-4274-9788-11b4d959f23b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.689239 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "296a1c86-330b-46f9-9ae3-53b42c2e6cb8" (UID: "296a1c86-330b-46f9-9ae3-53b42c2e6cb8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.694519 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-kube-api-access-cf6t7" (OuterVolumeSpecName: "kube-api-access-cf6t7") pod "296a1c86-330b-46f9-9ae3-53b42c2e6cb8" (UID: "296a1c86-330b-46f9-9ae3-53b42c2e6cb8"). InnerVolumeSpecName "kube-api-access-cf6t7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.694577 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f27cfa6f-024e-4274-9788-11b4d959f23b-kube-api-access-dv7jd" (OuterVolumeSpecName: "kube-api-access-dv7jd") pod "f27cfa6f-024e-4274-9788-11b4d959f23b" (UID: "f27cfa6f-024e-4274-9788-11b4d959f23b"). InnerVolumeSpecName "kube-api-access-dv7jd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.694602 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-kube-api-access-fh7zm" (OuterVolumeSpecName: "kube-api-access-fh7zm") pod "875f6fb5-17e4-4ebf-bb40-7cecca7662ae" (UID: "875f6fb5-17e4-4ebf-bb40-7cecca7662ae"). InnerVolumeSpecName "kube-api-access-fh7zm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.695844 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e96d2f64-5044-4f9a-908d-1f31671b7ee5-kube-api-access-md8fk" (OuterVolumeSpecName: "kube-api-access-md8fk") pod "e96d2f64-5044-4f9a-908d-1f31671b7ee5" (UID: "e96d2f64-5044-4f9a-908d-1f31671b7ee5"). InnerVolumeSpecName "kube-api-access-md8fk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.790062 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.790098 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.790111 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf6t7\" (UniqueName: \"kubernetes.io/projected/296a1c86-330b-46f9-9ae3-53b42c2e6cb8-kube-api-access-cf6t7\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.790123 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md8fk\" (UniqueName: \"kubernetes.io/projected/e96d2f64-5044-4f9a-908d-1f31671b7ee5-kube-api-access-md8fk\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.790132 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh7zm\" (UniqueName: \"kubernetes.io/projected/875f6fb5-17e4-4ebf-bb40-7cecca7662ae-kube-api-access-fh7zm\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.790140 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv7jd\" (UniqueName: \"kubernetes.io/projected/f27cfa6f-024e-4274-9788-11b4d959f23b-kube-api-access-dv7jd\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.790148 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f27cfa6f-024e-4274-9788-11b4d959f23b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.790155 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e96d2f64-5044-4f9a-908d-1f31671b7ee5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.936282 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-d6b4-account-create-update-m556k" event={"ID":"296a1c86-330b-46f9-9ae3-53b42c2e6cb8","Type":"ContainerDied","Data":"8fde9c6855bcd1f1ecb38ffb4297f62af388a206aa98a0d74cfd600c674fc84f"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.936630 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fde9c6855bcd1f1ecb38ffb4297f62af388a206aa98a0d74cfd600c674fc84f" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.936307 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-d6b4-account-create-update-m556k" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.940332 4652 generic.go:334] "Generic (PLEG): container finished" podID="f6002596-a7c4-4efc-b162-4bdcc1cf63b8" containerID="c0b012eb8c3df1d4d7870965f6902a4bab7e40c41d981de93b70a5d2402a39ad" exitCode=0 Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.940460 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7n6qs" event={"ID":"f6002596-a7c4-4efc-b162-4bdcc1cf63b8","Type":"ContainerDied","Data":"c0b012eb8c3df1d4d7870965f6902a4bab7e40c41d981de93b70a5d2402a39ad"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.940574 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7n6qs" event={"ID":"f6002596-a7c4-4efc-b162-4bdcc1cf63b8","Type":"ContainerStarted","Data":"64c8ad584d7ed78a33a6f5b80a49c877c837e56da872d5c9a0a166f57067b173"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.942480 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6rksq" event={"ID":"f27cfa6f-024e-4274-9788-11b4d959f23b","Type":"ContainerDied","Data":"35c2dfe6aa4853d4c99cebb4585d901717ad6c84ba1d37679bfd3ad0f33c002b"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.942518 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35c2dfe6aa4853d4c99cebb4585d901717ad6c84ba1d37679bfd3ad0f33c002b" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.942571 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6rksq" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.948268 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6p9w6" event={"ID":"875f6fb5-17e4-4ebf-bb40-7cecca7662ae","Type":"ContainerDied","Data":"3e279a2fe51324065a6491738e0b19cf45eee1b52f0569f57632d5153b94da49"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.948295 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6p9w6" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.948298 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e279a2fe51324065a6491738e0b19cf45eee1b52f0569f57632d5153b94da49" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.949699 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1b24-account-create-update-9xqkv" event={"ID":"e96d2f64-5044-4f9a-908d-1f31671b7ee5","Type":"ContainerDied","Data":"335f07e12ae1da2a382dfcc523fa0ee29027d31afd1489727c878406a7d39d4a"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.949725 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="335f07e12ae1da2a382dfcc523fa0ee29027d31afd1489727c878406a7d39d4a" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.949707 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1b24-account-create-update-9xqkv" Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.952140 4652 generic.go:334] "Generic (PLEG): container finished" podID="efdedbbd-400f-4cbe-810b-ee45f74678ce" containerID="cb5be2d2c84f876e7ccc1277ecdf62e96bf1d2f00169d5521a41420418deeb17" exitCode=0 Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.952197 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g4pq9" event={"ID":"efdedbbd-400f-4cbe-810b-ee45f74678ce","Type":"ContainerDied","Data":"cb5be2d2c84f876e7ccc1277ecdf62e96bf1d2f00169d5521a41420418deeb17"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.953588 4652 generic.go:334] "Generic (PLEG): container finished" podID="ebe437ed-e42a-41b6-a50c-9678c3807f8c" containerID="bfe0566958b046c824313ca92d235673089c8b3679c8dba3f106865946349d95" exitCode=0 Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.953632 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3c89-account-create-update-rctff" event={"ID":"ebe437ed-e42a-41b6-a50c-9678c3807f8c","Type":"ContainerDied","Data":"bfe0566958b046c824313ca92d235673089c8b3679c8dba3f106865946349d95"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.953660 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3c89-account-create-update-rctff" event={"ID":"ebe437ed-e42a-41b6-a50c-9678c3807f8c","Type":"ContainerStarted","Data":"4d09c19861c101285c28bb5aef6fade8c41bec9e25525681e376042ff2f02477"} Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.954829 4652 generic.go:334] "Generic (PLEG): container finished" podID="3ce2513b-6436-4167-8796-3769eb3cba5e" containerID="9ce137261b4e7f177aea5210c74fbccafb01bc775eac6725564ca2e7f0580511" exitCode=0 Dec 05 05:43:50 crc kubenswrapper[4652]: I1205 05:43:50.954861 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0544-account-create-update-6ht5w" event={"ID":"3ce2513b-6436-4167-8796-3769eb3cba5e","Type":"ContainerDied","Data":"9ce137261b4e7f177aea5210c74fbccafb01bc775eac6725564ca2e7f0580511"} Dec 05 05:43:53 crc kubenswrapper[4652]: I1205 05:43:53.992496 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g4pq9" event={"ID":"efdedbbd-400f-4cbe-810b-ee45f74678ce","Type":"ContainerDied","Data":"6bd79d00f5b3b24a1ee196735aee636b3046425a4eb78e9e07872483d328587c"} Dec 05 05:43:53 crc kubenswrapper[4652]: I1205 05:43:53.992918 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6bd79d00f5b3b24a1ee196735aee636b3046425a4eb78e9e07872483d328587c" Dec 05 05:43:53 crc kubenswrapper[4652]: I1205 05:43:53.994504 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-0544-account-create-update-6ht5w" event={"ID":"3ce2513b-6436-4167-8796-3769eb3cba5e","Type":"ContainerDied","Data":"349a90b051763cb2c7c69c201f092f397998be98f674135b7d77659291a7a58f"} Dec 05 05:43:53 crc kubenswrapper[4652]: I1205 05:43:53.994902 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="349a90b051763cb2c7c69c201f092f397998be98f674135b7d77659291a7a58f" Dec 05 05:43:53 crc kubenswrapper[4652]: I1205 05:43:53.997405 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-3c89-account-create-update-rctff" event={"ID":"ebe437ed-e42a-41b6-a50c-9678c3807f8c","Type":"ContainerDied","Data":"4d09c19861c101285c28bb5aef6fade8c41bec9e25525681e376042ff2f02477"} Dec 05 05:43:53 crc 
kubenswrapper[4652]: I1205 05:43:53.997424 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d09c19861c101285c28bb5aef6fade8c41bec9e25525681e376042ff2f02477" Dec 05 05:43:53 crc kubenswrapper[4652]: I1205 05:43:53.999485 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7n6qs" event={"ID":"f6002596-a7c4-4efc-b162-4bdcc1cf63b8","Type":"ContainerDied","Data":"64c8ad584d7ed78a33a6f5b80a49c877c837e56da872d5c9a0a166f57067b173"} Dec 05 05:43:53 crc kubenswrapper[4652]: I1205 05:43:53.999516 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64c8ad584d7ed78a33a6f5b80a49c877c837e56da872d5c9a0a166f57067b173" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.076356 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.081200 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.099511 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.111798 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.160060 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-operator-scripts\") pod \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\" (UID: \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\") " Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.160131 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ndtl\" (UniqueName: \"kubernetes.io/projected/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-kube-api-access-5ndtl\") pod \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\" (UID: \"f6002596-a7c4-4efc-b162-4bdcc1cf63b8\") " Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.160299 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe437ed-e42a-41b6-a50c-9678c3807f8c-operator-scripts\") pod \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\" (UID: \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\") " Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.160468 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w9pk\" (UniqueName: \"kubernetes.io/projected/ebe437ed-e42a-41b6-a50c-9678c3807f8c-kube-api-access-5w9pk\") pod \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\" (UID: \"ebe437ed-e42a-41b6-a50c-9678c3807f8c\") " Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.160912 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f6002596-a7c4-4efc-b162-4bdcc1cf63b8" (UID: "f6002596-a7c4-4efc-b162-4bdcc1cf63b8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.161031 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebe437ed-e42a-41b6-a50c-9678c3807f8c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ebe437ed-e42a-41b6-a50c-9678c3807f8c" (UID: "ebe437ed-e42a-41b6-a50c-9678c3807f8c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.163790 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-kube-api-access-5ndtl" (OuterVolumeSpecName: "kube-api-access-5ndtl") pod "f6002596-a7c4-4efc-b162-4bdcc1cf63b8" (UID: "f6002596-a7c4-4efc-b162-4bdcc1cf63b8"). InnerVolumeSpecName "kube-api-access-5ndtl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.164149 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebe437ed-e42a-41b6-a50c-9678c3807f8c-kube-api-access-5w9pk" (OuterVolumeSpecName: "kube-api-access-5w9pk") pod "ebe437ed-e42a-41b6-a50c-9678c3807f8c" (UID: "ebe437ed-e42a-41b6-a50c-9678c3807f8c"). InnerVolumeSpecName "kube-api-access-5w9pk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.262151 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ce2513b-6436-4167-8796-3769eb3cba5e-operator-scripts\") pod \"3ce2513b-6436-4167-8796-3769eb3cba5e\" (UID: \"3ce2513b-6436-4167-8796-3769eb3cba5e\") " Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.262235 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48mch\" (UniqueName: \"kubernetes.io/projected/3ce2513b-6436-4167-8796-3769eb3cba5e-kube-api-access-48mch\") pod \"3ce2513b-6436-4167-8796-3769eb3cba5e\" (UID: \"3ce2513b-6436-4167-8796-3769eb3cba5e\") " Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.262409 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efdedbbd-400f-4cbe-810b-ee45f74678ce-operator-scripts\") pod \"efdedbbd-400f-4cbe-810b-ee45f74678ce\" (UID: \"efdedbbd-400f-4cbe-810b-ee45f74678ce\") " Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.262592 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzx2s\" (UniqueName: \"kubernetes.io/projected/efdedbbd-400f-4cbe-810b-ee45f74678ce-kube-api-access-lzx2s\") pod \"efdedbbd-400f-4cbe-810b-ee45f74678ce\" (UID: \"efdedbbd-400f-4cbe-810b-ee45f74678ce\") " Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.262978 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ce2513b-6436-4167-8796-3769eb3cba5e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3ce2513b-6436-4167-8796-3769eb3cba5e" (UID: "3ce2513b-6436-4167-8796-3769eb3cba5e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.263000 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efdedbbd-400f-4cbe-810b-ee45f74678ce-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "efdedbbd-400f-4cbe-810b-ee45f74678ce" (UID: "efdedbbd-400f-4cbe-810b-ee45f74678ce"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.263515 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w9pk\" (UniqueName: \"kubernetes.io/projected/ebe437ed-e42a-41b6-a50c-9678c3807f8c-kube-api-access-5w9pk\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.263540 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.263572 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ndtl\" (UniqueName: \"kubernetes.io/projected/f6002596-a7c4-4efc-b162-4bdcc1cf63b8-kube-api-access-5ndtl\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.263582 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efdedbbd-400f-4cbe-810b-ee45f74678ce-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.263594 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebe437ed-e42a-41b6-a50c-9678c3807f8c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.263602 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ce2513b-6436-4167-8796-3769eb3cba5e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.265910 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdedbbd-400f-4cbe-810b-ee45f74678ce-kube-api-access-lzx2s" (OuterVolumeSpecName: "kube-api-access-lzx2s") pod "efdedbbd-400f-4cbe-810b-ee45f74678ce" (UID: "efdedbbd-400f-4cbe-810b-ee45f74678ce"). InnerVolumeSpecName "kube-api-access-lzx2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.266320 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ce2513b-6436-4167-8796-3769eb3cba5e-kube-api-access-48mch" (OuterVolumeSpecName: "kube-api-access-48mch") pod "3ce2513b-6436-4167-8796-3769eb3cba5e" (UID: "3ce2513b-6436-4167-8796-3769eb3cba5e"). InnerVolumeSpecName "kube-api-access-48mch". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.365588 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzx2s\" (UniqueName: \"kubernetes.io/projected/efdedbbd-400f-4cbe-810b-ee45f74678ce-kube-api-access-lzx2s\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:54 crc kubenswrapper[4652]: I1205 05:43:54.365622 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48mch\" (UniqueName: \"kubernetes.io/projected/3ce2513b-6436-4167-8796-3769eb3cba5e-kube-api-access-48mch\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:55 crc kubenswrapper[4652]: I1205 05:43:55.008685 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-0544-account-create-update-6ht5w" Dec 05 05:43:55 crc kubenswrapper[4652]: I1205 05:43:55.008702 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-3c89-account-create-update-rctff" Dec 05 05:43:55 crc kubenswrapper[4652]: I1205 05:43:55.008795 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-7n6qs" Dec 05 05:43:55 crc kubenswrapper[4652]: I1205 05:43:55.008832 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qqgfk" event={"ID":"e26f824e-a877-4436-bca3-8ecdb1d1a73c","Type":"ContainerStarted","Data":"691fab56455a90b3527c40ebd18fc5a2cc3dee28cb094719a707fd282eef9ae5"} Dec 05 05:43:55 crc kubenswrapper[4652]: I1205 05:43:55.008869 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g4pq9" Dec 05 05:43:55 crc kubenswrapper[4652]: I1205 05:43:55.043346 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-qqgfk" podStartSLOduration=2.164444908 podStartE2EDuration="8.043319952s" podCreationTimestamp="2025-12-05 05:43:47 +0000 UTC" firstStartedPulling="2025-12-05 05:43:48.071618834 +0000 UTC m=+1030.308349101" lastFinishedPulling="2025-12-05 05:43:53.950493877 +0000 UTC m=+1036.187224145" observedRunningTime="2025-12-05 05:43:55.03440214 +0000 UTC m=+1037.271132407" watchObservedRunningTime="2025-12-05 05:43:55.043319952 +0000 UTC m=+1037.280050219" Dec 05 05:43:57 crc kubenswrapper[4652]: I1205 05:43:57.023526 4652 generic.go:334] "Generic (PLEG): container finished" podID="e26f824e-a877-4436-bca3-8ecdb1d1a73c" containerID="691fab56455a90b3527c40ebd18fc5a2cc3dee28cb094719a707fd282eef9ae5" exitCode=0 Dec 05 05:43:57 crc kubenswrapper[4652]: I1205 05:43:57.023598 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qqgfk" event={"ID":"e26f824e-a877-4436-bca3-8ecdb1d1a73c","Type":"ContainerDied","Data":"691fab56455a90b3527c40ebd18fc5a2cc3dee28cb094719a707fd282eef9ae5"} Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.630728 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.766110 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-config-data\") pod \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.766462 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-combined-ca-bundle\") pod \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.766618 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hf7g\" (UniqueName: \"kubernetes.io/projected/e26f824e-a877-4436-bca3-8ecdb1d1a73c-kube-api-access-4hf7g\") pod \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\" (UID: \"e26f824e-a877-4436-bca3-8ecdb1d1a73c\") " Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.769590 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e26f824e-a877-4436-bca3-8ecdb1d1a73c-kube-api-access-4hf7g" (OuterVolumeSpecName: "kube-api-access-4hf7g") pod "e26f824e-a877-4436-bca3-8ecdb1d1a73c" (UID: "e26f824e-a877-4436-bca3-8ecdb1d1a73c"). InnerVolumeSpecName "kube-api-access-4hf7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.787505 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e26f824e-a877-4436-bca3-8ecdb1d1a73c" (UID: "e26f824e-a877-4436-bca3-8ecdb1d1a73c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.801909 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-config-data" (OuterVolumeSpecName: "config-data") pod "e26f824e-a877-4436-bca3-8ecdb1d1a73c" (UID: "e26f824e-a877-4436-bca3-8ecdb1d1a73c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.868978 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.869007 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hf7g\" (UniqueName: \"kubernetes.io/projected/e26f824e-a877-4436-bca3-8ecdb1d1a73c-kube-api-access-4hf7g\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:58 crc kubenswrapper[4652]: I1205 05:43:58.869017 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e26f824e-a877-4436-bca3-8ecdb1d1a73c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.044198 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-kt5bg" event={"ID":"dac210ca-32b5-43af-b85e-4eb7ae57e9d5","Type":"ContainerStarted","Data":"5088cc16b763bf7ef0d72a42412d52ca8415fe59b0a48f99f1d403503a2af8aa"} Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.045909 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qqgfk" event={"ID":"e26f824e-a877-4436-bca3-8ecdb1d1a73c","Type":"ContainerDied","Data":"de796c25102336e6cc667ffacf800eb6e5768487eeaa28fc522348b33e7e0792"} Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.045949 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de796c25102336e6cc667ffacf800eb6e5768487eeaa28fc522348b33e7e0792" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.045983 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qqgfk" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.062090 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-kt5bg" podStartSLOduration=1.245030249 podStartE2EDuration="10.062072963s" podCreationTimestamp="2025-12-05 05:43:49 +0000 UTC" firstStartedPulling="2025-12-05 05:43:49.748165877 +0000 UTC m=+1031.984896144" lastFinishedPulling="2025-12-05 05:43:58.565208591 +0000 UTC m=+1040.801938858" observedRunningTime="2025-12-05 05:43:59.059916281 +0000 UTC m=+1041.296646548" watchObservedRunningTime="2025-12-05 05:43:59.062072963 +0000 UTC m=+1041.298803231" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.339623 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-nmg5q"] Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.339964 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efdedbbd-400f-4cbe-810b-ee45f74678ce" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.339976 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="efdedbbd-400f-4cbe-810b-ee45f74678ce" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.339986 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ce2513b-6436-4167-8796-3769eb3cba5e" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.339992 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ce2513b-6436-4167-8796-3769eb3cba5e" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.340014 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f27cfa6f-024e-4274-9788-11b4d959f23b" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340020 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f27cfa6f-024e-4274-9788-11b4d959f23b" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.340035 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e96d2f64-5044-4f9a-908d-1f31671b7ee5" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340041 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e96d2f64-5044-4f9a-908d-1f31671b7ee5" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.340052 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe437ed-e42a-41b6-a50c-9678c3807f8c" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340058 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe437ed-e42a-41b6-a50c-9678c3807f8c" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.340068 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6002596-a7c4-4efc-b162-4bdcc1cf63b8" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340073 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6002596-a7c4-4efc-b162-4bdcc1cf63b8" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.340081 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e26f824e-a877-4436-bca3-8ecdb1d1a73c" 
containerName="keystone-db-sync" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340087 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e26f824e-a877-4436-bca3-8ecdb1d1a73c" containerName="keystone-db-sync" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.340100 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296a1c86-330b-46f9-9ae3-53b42c2e6cb8" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340105 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="296a1c86-330b-46f9-9ae3-53b42c2e6cb8" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.340113 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="875f6fb5-17e4-4ebf-bb40-7cecca7662ae" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340118 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="875f6fb5-17e4-4ebf-bb40-7cecca7662ae" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340254 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe437ed-e42a-41b6-a50c-9678c3807f8c" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340268 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="875f6fb5-17e4-4ebf-bb40-7cecca7662ae" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340275 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ce2513b-6436-4167-8796-3769eb3cba5e" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340284 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f27cfa6f-024e-4274-9788-11b4d959f23b" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340295 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="efdedbbd-400f-4cbe-810b-ee45f74678ce" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340300 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6002596-a7c4-4efc-b162-4bdcc1cf63b8" containerName="mariadb-database-create" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340310 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e26f824e-a877-4436-bca3-8ecdb1d1a73c" containerName="keystone-db-sync" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340321 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e96d2f64-5044-4f9a-908d-1f31671b7ee5" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340330 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="296a1c86-330b-46f9-9ae3-53b42c2e6cb8" containerName="mariadb-account-create-update" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.340856 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.343767 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.344281 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mxsrw" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.346862 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-nmg5q"] Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.377312 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-db-sync-config-data\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.377401 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg7pc\" (UniqueName: \"kubernetes.io/projected/a1e961f4-2398-4a5e-a424-e8066a6a7c78-kube-api-access-sg7pc\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.377826 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-config-data\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.378035 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-combined-ca-bundle\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.479306 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-db-sync-config-data\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.479372 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg7pc\" (UniqueName: \"kubernetes.io/projected/a1e961f4-2398-4a5e-a424-e8066a6a7c78-kube-api-access-sg7pc\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.479461 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-config-data\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.479505 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-combined-ca-bundle\") pod 
\"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.483785 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-combined-ca-bundle\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.483927 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-config-data\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.484788 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-db-sync-config-data\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.501971 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg7pc\" (UniqueName: \"kubernetes.io/projected/a1e961f4-2398-4a5e-a424-e8066a6a7c78-kube-api-access-sg7pc\") pod \"glance-db-sync-nmg5q\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: E1205 05:43:59.626459 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330ff405_1db6_4136_b17b_679168d3125b.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.656536 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-nmg5q" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.912209 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58bbf48b7f-m7wrn"] Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.913902 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.936748 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58bbf48b7f-m7wrn"] Dec 05 05:43:59 crc kubenswrapper[4652]: I1205 05:43:59.999331 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-gqz4t"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.000357 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.008499 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gqz4t"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.011179 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.011834 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.012234 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.015105 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.015234 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-h2hbv" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101182 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-scripts\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101223 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-fernet-keys\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101260 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-svc\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101279 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-combined-ca-bundle\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101316 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-swift-storage-0\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101345 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-nb\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101360 4652 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-sb\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101394 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbkdd\" (UniqueName: \"kubernetes.io/projected/9aaadf80-3d37-46ac-8259-6d50a6de1053-kube-api-access-fbkdd\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101428 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gscxq\" (UniqueName: \"kubernetes.io/projected/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-kube-api-access-gscxq\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101448 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-config\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101463 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-config-data\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.101490 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-credential-keys\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.105298 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6795c4fff7-6rkzk"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.106572 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.113881 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.114358 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.115615 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.115712 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-rmm6x" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.122198 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6795c4fff7-6rkzk"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.175565 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-xcf4m"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.176827 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.180615 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-pvs56"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.181704 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.190486 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-s89nn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.190863 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.191079 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.191255 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.191430 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nsk9t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.192014 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.194612 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-xcf4m"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203188 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-config\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203227 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-config-data\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203265 4652 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-credential-keys\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203291 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-scripts\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203314 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-fernet-keys\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203337 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-svc\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203358 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-combined-ca-bundle\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203409 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-swift-storage-0\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203441 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-nb\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203460 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-sb\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203492 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbkdd\" (UniqueName: \"kubernetes.io/projected/9aaadf80-3d37-46ac-8259-6d50a6de1053-kube-api-access-fbkdd\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.203520 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gscxq\" (UniqueName: 
\"kubernetes.io/projected/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-kube-api-access-gscxq\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.205034 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pvs56"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.205237 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-config\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.205291 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-svc\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.206991 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-nb\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.207173 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-sb\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.207799 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-swift-storage-0\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.220998 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-credential-keys\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.226978 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-combined-ca-bundle\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.229844 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-scripts\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.230314 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gscxq\" (UniqueName: 
\"kubernetes.io/projected/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-kube-api-access-gscxq\") pod \"dnsmasq-dns-58bbf48b7f-m7wrn\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.244669 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.251047 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbkdd\" (UniqueName: \"kubernetes.io/projected/9aaadf80-3d37-46ac-8259-6d50a6de1053-kube-api-access-fbkdd\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.258328 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-config-data\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.261045 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-fernet-keys\") pod \"keystone-bootstrap-gqz4t\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.280020 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.290104 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.301310 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.301323 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.311374 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxlzp\" (UniqueName: \"kubernetes.io/projected/298ca280-89e1-42fc-8d46-b63f6588896f-kube-api-access-cxlzp\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312029 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-combined-ca-bundle\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312113 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-config-data\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312158 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-combined-ca-bundle\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312177 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-config-data\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312199 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffkhs\" (UniqueName: \"kubernetes.io/projected/266e7065-7af6-4547-b7bb-5e981e095969-kube-api-access-ffkhs\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312222 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b190f7c2-4b39-46ac-a309-fdee7641c525-horizon-secret-key\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312265 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-config\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312280 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b190f7c2-4b39-46ac-a309-fdee7641c525-logs\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312297 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxhxv\" (UniqueName: \"kubernetes.io/projected/b190f7c2-4b39-46ac-a309-fdee7641c525-kube-api-access-fxhxv\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312346 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-scripts\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312370 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/266e7065-7af6-4547-b7bb-5e981e095969-etc-machine-id\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312396 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-scripts\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.312413 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-db-sync-config-data\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.330438 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.351867 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.361841 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7d8787bf9c-pxslp"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.363713 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d8787bf9c-pxslp" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.389501 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7d8787bf9c-pxslp"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.412017 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-wqg7s"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.414197 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-wqg7s" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.414837 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.414884 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffkhs\" (UniqueName: \"kubernetes.io/projected/266e7065-7af6-4547-b7bb-5e981e095969-kube-api-access-ffkhs\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.414917 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b190f7c2-4b39-46ac-a309-fdee7641c525-horizon-secret-key\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.414968 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-config\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.414986 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b190f7c2-4b39-46ac-a309-fdee7641c525-logs\") pod \"horizon-6795c4fff7-6rkzk\" (UID: 
\"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415001 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-config-data\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415015 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415036 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxhxv\" (UniqueName: \"kubernetes.io/projected/b190f7c2-4b39-46ac-a309-fdee7641c525-kube-api-access-fxhxv\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415072 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-scripts\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415096 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-scripts\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415115 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/266e7065-7af6-4547-b7bb-5e981e095969-etc-machine-id\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415148 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-scripts\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415166 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-db-sync-config-data\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415181 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-run-httpd\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415242 4652 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-log-httpd\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415268 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxlzp\" (UniqueName: \"kubernetes.io/projected/298ca280-89e1-42fc-8d46-b63f6588896f-kube-api-access-cxlzp\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415357 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-combined-ca-bundle\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415385 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-config-data\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415410 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf97b\" (UniqueName: \"kubernetes.io/projected/70b3ad88-854c-4047-adbe-a9a7c01b9f81-kube-api-access-pf97b\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415447 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-combined-ca-bundle\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.415468 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-config-data\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.417358 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b190f7c2-4b39-46ac-a309-fdee7641c525-logs\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.417452 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-scripts\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.417503 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/266e7065-7af6-4547-b7bb-5e981e095969-etc-machine-id\") pod \"cinder-db-sync-xcf4m\" (UID: 
\"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.417595 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.417712 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-jrz9t" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.418664 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-config-data\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.429290 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-combined-ca-bundle\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.429287 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-scripts\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.429747 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b190f7c2-4b39-46ac-a309-fdee7641c525-horizon-secret-key\") pod \"horizon-6795c4fff7-6rkzk\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") " pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.430752 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-combined-ca-bundle\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.431060 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-db-sync-config-data\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.432022 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-config\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.440417 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-config-data\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.444718 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-mlkhj"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.446095 
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.450253 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.455029 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.455219 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.455354 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-plpwr"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.455459 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxlzp\" (UniqueName: \"kubernetes.io/projected/298ca280-89e1-42fc-8d46-b63f6588896f-kube-api-access-cxlzp\") pod \"neutron-db-sync-pvs56\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " pod="openstack/neutron-db-sync-pvs56"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.455903 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffkhs\" (UniqueName: \"kubernetes.io/projected/266e7065-7af6-4547-b7bb-5e981e095969-kube-api-access-ffkhs\") pod \"cinder-db-sync-xcf4m\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " pod="openstack/cinder-db-sync-xcf4m"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.518487 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-mlkhj"]
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521294 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-config-data\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521336 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521392 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-scripts\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521431 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/34e6630c-45d1-4105-9ce1-a22701d0231a-horizon-secret-key\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521456 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-db-sync-config-data\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521493 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-run-httpd\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521578 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-log-httpd\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521597 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl47g\" (UniqueName: \"kubernetes.io/projected/34e6630c-45d1-4105-9ce1-a22701d0231a-kube-api-access-rl47g\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521622 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-config-data\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521675 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-combined-ca-bundle\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521698 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34e6630c-45d1-4105-9ce1-a22701d0231a-logs\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521801 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf97b\" (UniqueName: \"kubernetes.io/projected/70b3ad88-854c-4047-adbe-a9a7c01b9f81-kube-api-access-pf97b\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521828 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-scripts\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521848 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pss7l\" (UniqueName: \"kubernetes.io/projected/3415917b-2730-494b-b474-b1bd9c1d08f5-kube-api-access-pss7l\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.521893 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.528356 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-xcf4m"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.533809 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-run-httpd\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.534546 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-scripts\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.534703 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-wqg7s"]
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.535240 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.539343 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-config-data\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.547081 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-log-httpd\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.547655 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.574503 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf97b\" (UniqueName: \"kubernetes.io/projected/70b3ad88-854c-4047-adbe-a9a7c01b9f81-kube-api-access-pf97b\") pod \"ceilometer-0\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") " pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.586156 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bbf48b7f-m7wrn"]
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.592967 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pvs56"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.596641 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-578598f949-nfsj7"]
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.598205 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578598f949-nfsj7"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.605912 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578598f949-nfsj7"]
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.621643 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-nmg5q"]
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.625941 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-config-data\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.625987 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86c35465-1240-412c-9182-99d8ed10f948-logs\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626036 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl47g\" (UniqueName: \"kubernetes.io/projected/34e6630c-45d1-4105-9ce1-a22701d0231a-kube-api-access-rl47g\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626052 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-config-data\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626084 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-combined-ca-bundle\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626103 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34e6630c-45d1-4105-9ce1-a22701d0231a-logs\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626117 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-combined-ca-bundle\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626305 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-scripts\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626342 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pss7l\" (UniqueName: \"kubernetes.io/projected/3415917b-2730-494b-b474-b1bd9c1d08f5-kube-api-access-pss7l\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626444 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-scripts\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626483 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjhbq\" (UniqueName: \"kubernetes.io/projected/86c35465-1240-412c-9182-99d8ed10f948-kube-api-access-qjhbq\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626598 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/34e6630c-45d1-4105-9ce1-a22701d0231a-horizon-secret-key\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.626619 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-db-sync-config-data\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.627935 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34e6630c-45d1-4105-9ce1-a22701d0231a-logs\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.628334 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-scripts\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.628920 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-config-data\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.632290 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.633044 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-combined-ca-bundle\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.633910 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/34e6630c-45d1-4105-9ce1-a22701d0231a-horizon-secret-key\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.634073 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-db-sync-config-data\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.649834 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl47g\" (UniqueName: \"kubernetes.io/projected/34e6630c-45d1-4105-9ce1-a22701d0231a-kube-api-access-rl47g\") pod \"horizon-7d8787bf9c-pxslp\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") " pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.663527 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pss7l\" (UniqueName: \"kubernetes.io/projected/3415917b-2730-494b-b474-b1bd9c1d08f5-kube-api-access-pss7l\") pod \"barbican-db-sync-wqg7s\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.690512 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730171 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-svc\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730226 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-config\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730300 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-scripts\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730366 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjhbq\" (UniqueName: \"kubernetes.io/projected/86c35465-1240-412c-9182-99d8ed10f948-kube-api-access-qjhbq\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730398 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmls8\" (UniqueName: \"kubernetes.io/projected/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-kube-api-access-cmls8\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730497 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-nb\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730585 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-config-data\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730626 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86c35465-1240-412c-9182-99d8ed10f948-logs\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.730998 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-sb\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.731074 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-combined-ca-bundle\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.731108 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-swift-storage-0\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.736902 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6795c4fff7-6rkzk"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.733183 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-wqg7s"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.743234 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86c35465-1240-412c-9182-99d8ed10f948-logs\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.743354 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-config-data\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.743427 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-scripts\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.743866 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-combined-ca-bundle\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.749371 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjhbq\" (UniqueName: \"kubernetes.io/projected/86c35465-1240-412c-9182-99d8ed10f948-kube-api-access-qjhbq\") pod \"placement-db-sync-mlkhj\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " pod="openstack/placement-db-sync-mlkhj"
Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.778676 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-mlkhj"
Need to start a new one" pod="openstack/placement-db-sync-mlkhj" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.833919 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-nb\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.834063 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-sb\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.834115 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-swift-storage-0\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.834571 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-svc\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.834591 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-config\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.834664 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmls8\" (UniqueName: \"kubernetes.io/projected/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-kube-api-access-cmls8\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.835343 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-nb\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.835671 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-sb\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.836067 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-svc\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc 
kubenswrapper[4652]: I1205 05:44:00.836219 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-config\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.836685 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-swift-storage-0\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.845673 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bbf48b7f-m7wrn"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.848993 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmls8\" (UniqueName: \"kubernetes.io/projected/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-kube-api-access-cmls8\") pod \"dnsmasq-dns-578598f949-nfsj7\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") " pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: W1205 05:44:00.854665 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb20e4fb1_1fe5_4264_b344_258ca4ae42ae.slice/crio-f9ea21a7f105c416e69a1248464c0e879aa8b489b46d8775d69fe05353a42deb WatchSource:0}: Error finding container f9ea21a7f105c416e69a1248464c0e879aa8b489b46d8775d69fe05353a42deb: Status 404 returned error can't find the container with id f9ea21a7f105c416e69a1248464c0e879aa8b489b46d8775d69fe05353a42deb Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.942147 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gqz4t"] Dec 05 05:44:00 crc kubenswrapper[4652]: I1205 05:44:00.959365 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:00 crc kubenswrapper[4652]: W1205 05:44:00.984349 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9aaadf80_3d37_46ac_8259_6d50a6de1053.slice/crio-e9cda0951f93e161a73d880db0d0332f91cd455c4a2f31bc388e72cdf32a5c4d WatchSource:0}: Error finding container e9cda0951f93e161a73d880db0d0332f91cd455c4a2f31bc388e72cdf32a5c4d: Status 404 returned error can't find the container with id e9cda0951f93e161a73d880db0d0332f91cd455c4a2f31bc388e72cdf32a5c4d Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.088299 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gqz4t" event={"ID":"9aaadf80-3d37-46ac-8259-6d50a6de1053","Type":"ContainerStarted","Data":"e9cda0951f93e161a73d880db0d0332f91cd455c4a2f31bc388e72cdf32a5c4d"} Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.090324 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pvs56"] Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.099057 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" event={"ID":"b20e4fb1-1fe5-4264-b344-258ca4ae42ae","Type":"ContainerStarted","Data":"f9ea21a7f105c416e69a1248464c0e879aa8b489b46d8775d69fe05353a42deb"} Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.102077 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nmg5q" event={"ID":"a1e961f4-2398-4a5e-a424-e8066a6a7c78","Type":"ContainerStarted","Data":"dad0187ac78b926326e87fa4f7dafbed4c4ef356fdf8d470ed1c7b2dc7c1ad95"} Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.105927 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-xcf4m"] Dec 05 05:44:01 crc kubenswrapper[4652]: W1205 05:44:01.128356 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod266e7065_7af6_4547_b7bb_5e981e095969.slice/crio-c95a151926a48c092d999269319cfc2313153e8995c69d0bb7450f7fadb46abe WatchSource:0}: Error finding container c95a151926a48c092d999269319cfc2313153e8995c69d0bb7450f7fadb46abe: Status 404 returned error can't find the container with id c95a151926a48c092d999269319cfc2313153e8995c69d0bb7450f7fadb46abe Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.225428 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.298409 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-wqg7s"] Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.307390 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7d8787bf9c-pxslp"] Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.431939 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578598f949-nfsj7"] Dec 05 05:44:01 crc kubenswrapper[4652]: W1205 05:44:01.435404 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab9a77c7_a8c7_48bb_8d69_0eeaea574ac5.slice/crio-811f14f23cd0a760d8369cb560ba21f4b294b36f9d51dc0a8266600dedb9fb01 WatchSource:0}: Error finding container 811f14f23cd0a760d8369cb560ba21f4b294b36f9d51dc0a8266600dedb9fb01: Status 404 returned error can't find the container with id 
811f14f23cd0a760d8369cb560ba21f4b294b36f9d51dc0a8266600dedb9fb01 Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.502595 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-mlkhj"] Dec 05 05:44:01 crc kubenswrapper[4652]: I1205 05:44:01.553804 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6795c4fff7-6rkzk"] Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.108436 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-mlkhj" event={"ID":"86c35465-1240-412c-9182-99d8ed10f948","Type":"ContainerStarted","Data":"3046734acd569772574e3465386d2519f3b5b95103f288b3ac7464697b25fcec"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.109539 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pvs56" event={"ID":"298ca280-89e1-42fc-8d46-b63f6588896f","Type":"ContainerStarted","Data":"7157a9487fe7eada5e68e53d7dfcaa38333229795eff27006694d6360a5e0d75"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.110348 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d8787bf9c-pxslp" event={"ID":"34e6630c-45d1-4105-9ce1-a22701d0231a","Type":"ContainerStarted","Data":"63bacf8f6fa09400a375b5278a1f986b99648319eccea16db905f2801ab5639f"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.111609 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xcf4m" event={"ID":"266e7065-7af6-4547-b7bb-5e981e095969","Type":"ContainerStarted","Data":"c95a151926a48c092d999269319cfc2313153e8995c69d0bb7450f7fadb46abe"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.112952 4652 generic.go:334] "Generic (PLEG): container finished" podID="dac210ca-32b5-43af-b85e-4eb7ae57e9d5" containerID="5088cc16b763bf7ef0d72a42412d52ca8415fe59b0a48f99f1d403503a2af8aa" exitCode=0 Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.112994 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-kt5bg" event={"ID":"dac210ca-32b5-43af-b85e-4eb7ae57e9d5","Type":"ContainerDied","Data":"5088cc16b763bf7ef0d72a42412d52ca8415fe59b0a48f99f1d403503a2af8aa"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.115057 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerStarted","Data":"5f2c33c0d73400c617cf78562185392350fbabd29c71ebbf2b120df6cca535ef"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.115862 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-wqg7s" event={"ID":"3415917b-2730-494b-b474-b1bd9c1d08f5","Type":"ContainerStarted","Data":"68343fc1dbff44a02b7d2fe290c3a8d3ec499c0b1130f91e2d3fff254d7ee07e"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.116719 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578598f949-nfsj7" event={"ID":"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5","Type":"ContainerStarted","Data":"811f14f23cd0a760d8369cb560ba21f4b294b36f9d51dc0a8266600dedb9fb01"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.117728 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6795c4fff7-6rkzk" event={"ID":"b190f7c2-4b39-46ac-a309-fdee7641c525","Type":"ContainerStarted","Data":"9ebbe2e5790107d8fbfc155ac6f7d341f8db57331aaa7786056df9a2f0eece1f"} Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.379343 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/horizon-6795c4fff7-6rkzk"] Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.399660 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.412442 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-84bb48cbc7-nw55m"] Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.413824 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.435892 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84bb48cbc7-nw55m"] Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.468306 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-config-data\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.468365 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-logs\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.468444 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k72cs\" (UniqueName: \"kubernetes.io/projected/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-kube-api-access-k72cs\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.468466 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-scripts\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.468572 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-horizon-secret-key\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.570897 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k72cs\" (UniqueName: \"kubernetes.io/projected/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-kube-api-access-k72cs\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.570965 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-scripts\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.571264 4652 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-horizon-secret-key\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.571351 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-config-data\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.571408 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-logs\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.571806 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-logs\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.571905 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-scripts\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.572431 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-config-data\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.579758 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-horizon-secret-key\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.585692 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k72cs\" (UniqueName: \"kubernetes.io/projected/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-kube-api-access-k72cs\") pod \"horizon-84bb48cbc7-nw55m\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") " pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:02 crc kubenswrapper[4652]: I1205 05:44:02.734129 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.127164 4652 generic.go:334] "Generic (PLEG): container finished" podID="b20e4fb1-1fe5-4264-b344-258ca4ae42ae" containerID="652c87b4bfd678fa23a94536c03fd6b7d4f3e949a8940326d87aeb26c4eb0720" exitCode=0 Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.127544 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" event={"ID":"b20e4fb1-1fe5-4264-b344-258ca4ae42ae","Type":"ContainerDied","Data":"652c87b4bfd678fa23a94536c03fd6b7d4f3e949a8940326d87aeb26c4eb0720"} Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.128907 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84bb48cbc7-nw55m"] Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.147296 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pvs56" event={"ID":"298ca280-89e1-42fc-8d46-b63f6588896f","Type":"ContainerStarted","Data":"cc9549f1668c5d9c63a3650a583b150712103f246970d896899fbb3c64173558"} Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.152931 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gqz4t" event={"ID":"9aaadf80-3d37-46ac-8259-6d50a6de1053","Type":"ContainerStarted","Data":"4b405aa28f93563e533b354028f7c907c7631e66ce2ac3589c09dc152d3b6357"} Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.168286 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" containerID="8b303bdf22b72383711531f134b7fcd5201475737fe6c3b49fadc32dc9906e1b" exitCode=0 Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.168684 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578598f949-nfsj7" event={"ID":"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5","Type":"ContainerDied","Data":"8b303bdf22b72383711531f134b7fcd5201475737fe6c3b49fadc32dc9906e1b"} Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.169854 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-pvs56" podStartSLOduration=3.169839173 podStartE2EDuration="3.169839173s" podCreationTimestamp="2025-12-05 05:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:03.160774014 +0000 UTC m=+1045.397504281" watchObservedRunningTime="2025-12-05 05:44:03.169839173 +0000 UTC m=+1045.406569440" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.193797 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-gqz4t" podStartSLOduration=4.193780665 podStartE2EDuration="4.193780665s" podCreationTimestamp="2025-12-05 05:43:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:03.186686489 +0000 UTC m=+1045.423416746" watchObservedRunningTime="2025-12-05 05:44:03.193780665 +0000 UTC m=+1045.430510932" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.402539 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.488412 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.498360 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-db-sync-config-data\") pod \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.498410 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-config\") pod \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.498447 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8lqz\" (UniqueName: \"kubernetes.io/projected/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-kube-api-access-j8lqz\") pod \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.510388 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-kube-api-access-j8lqz" (OuterVolumeSpecName: "kube-api-access-j8lqz") pod "dac210ca-32b5-43af-b85e-4eb7ae57e9d5" (UID: "dac210ca-32b5-43af-b85e-4eb7ae57e9d5"). InnerVolumeSpecName "kube-api-access-j8lqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.510540 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "dac210ca-32b5-43af-b85e-4eb7ae57e9d5" (UID: "dac210ca-32b5-43af-b85e-4eb7ae57e9d5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.527290 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-config" (OuterVolumeSpecName: "config") pod "b20e4fb1-1fe5-4264-b344-258ca4ae42ae" (UID: "b20e4fb1-1fe5-4264-b344-258ca4ae42ae"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.603843 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-nb\") pod \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.603885 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-svc\") pod \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.603926 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-config-data\") pod \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.604036 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-swift-storage-0\") pod \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.604098 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-sb\") pod \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.604147 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gscxq\" (UniqueName: \"kubernetes.io/projected/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-kube-api-access-gscxq\") pod \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\" (UID: \"b20e4fb1-1fe5-4264-b344-258ca4ae42ae\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.604678 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-combined-ca-bundle\") pod \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\" (UID: \"dac210ca-32b5-43af-b85e-4eb7ae57e9d5\") " Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.605136 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.605153 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8lqz\" (UniqueName: \"kubernetes.io/projected/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-kube-api-access-j8lqz\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.605163 4652 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.621521 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-kube-api-access-gscxq" (OuterVolumeSpecName: "kube-api-access-gscxq") pod "b20e4fb1-1fe5-4264-b344-258ca4ae42ae" (UID: "b20e4fb1-1fe5-4264-b344-258ca4ae42ae"). InnerVolumeSpecName "kube-api-access-gscxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.629108 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b20e4fb1-1fe5-4264-b344-258ca4ae42ae" (UID: "b20e4fb1-1fe5-4264-b344-258ca4ae42ae"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.645187 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b20e4fb1-1fe5-4264-b344-258ca4ae42ae" (UID: "b20e4fb1-1fe5-4264-b344-258ca4ae42ae"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.657511 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b20e4fb1-1fe5-4264-b344-258ca4ae42ae" (UID: "b20e4fb1-1fe5-4264-b344-258ca4ae42ae"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.658483 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b20e4fb1-1fe5-4264-b344-258ca4ae42ae" (UID: "b20e4fb1-1fe5-4264-b344-258ca4ae42ae"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.669265 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dac210ca-32b5-43af-b85e-4eb7ae57e9d5" (UID: "dac210ca-32b5-43af-b85e-4eb7ae57e9d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.690030 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-config-data" (OuterVolumeSpecName: "config-data") pod "dac210ca-32b5-43af-b85e-4eb7ae57e9d5" (UID: "dac210ca-32b5-43af-b85e-4eb7ae57e9d5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.708212 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.708592 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.708610 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gscxq\" (UniqueName: \"kubernetes.io/projected/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-kube-api-access-gscxq\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.708741 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.708751 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.708801 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b20e4fb1-1fe5-4264-b344-258ca4ae42ae-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:03 crc kubenswrapper[4652]: I1205 05:44:03.708826 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dac210ca-32b5-43af-b85e-4eb7ae57e9d5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.162502 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.162832 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.195357 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.195675 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bbf48b7f-m7wrn" event={"ID":"b20e4fb1-1fe5-4264-b344-258ca4ae42ae","Type":"ContainerDied","Data":"f9ea21a7f105c416e69a1248464c0e879aa8b489b46d8775d69fe05353a42deb"} Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.195769 4652 scope.go:117] "RemoveContainer" containerID="652c87b4bfd678fa23a94536c03fd6b7d4f3e949a8940326d87aeb26c4eb0720" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.200164 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-kt5bg" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.200178 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-kt5bg" event={"ID":"dac210ca-32b5-43af-b85e-4eb7ae57e9d5","Type":"ContainerDied","Data":"cf2cd05c4bb3dd15df18aca28a24cc9eeea0168944b69929d659790e9d0f9dcc"} Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.200256 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf2cd05c4bb3dd15df18aca28a24cc9eeea0168944b69929d659790e9d0f9dcc" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.202324 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84bb48cbc7-nw55m" event={"ID":"8b79d3c1-ccd2-454f-979d-67561bb7cfe5","Type":"ContainerStarted","Data":"8966da6b8c4e3f2a3f3a94adf1cf2fe67dbe90581822d241ab9f3c0a8a4b0c09"} Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.204758 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578598f949-nfsj7" event={"ID":"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5","Type":"ContainerStarted","Data":"cf3caa178abc46f36d7fd9cd916042ff15942ac8fdb39995b977cb63bcbcd100"} Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.205709 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.307611 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bbf48b7f-m7wrn"] Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.327655 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58bbf48b7f-m7wrn"] Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.335770 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-578598f949-nfsj7" podStartSLOduration=4.335744373 podStartE2EDuration="4.335744373s" podCreationTimestamp="2025-12-05 05:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:04.267682822 +0000 UTC m=+1046.504413089" watchObservedRunningTime="2025-12-05 05:44:04.335744373 +0000 UTC m=+1046.572474640" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.394708 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:44:04 crc kubenswrapper[4652]: E1205 05:44:04.395349 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dac210ca-32b5-43af-b85e-4eb7ae57e9d5" containerName="watcher-db-sync" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.395374 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="dac210ca-32b5-43af-b85e-4eb7ae57e9d5" containerName="watcher-db-sync" Dec 05 05:44:04 crc kubenswrapper[4652]: E1205 05:44:04.395383 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b20e4fb1-1fe5-4264-b344-258ca4ae42ae" containerName="init" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.395392 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b20e4fb1-1fe5-4264-b344-258ca4ae42ae" containerName="init" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.395707 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="dac210ca-32b5-43af-b85e-4eb7ae57e9d5" containerName="watcher-db-sync" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.395726 4652 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b20e4fb1-1fe5-4264-b344-258ca4ae42ae" containerName="init" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.396532 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.398836 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-qz5nm" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.399033 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.418027 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.448601 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.449921 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.452314 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.472222 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.518952 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.528165 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.535904 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.536133 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.536206 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-config-data\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.537136 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca914a04-6a6f-4b20-af32-e0771a7dffa5-logs\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.538708 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjlf5\" (UniqueName: 
\"kubernetes.io/projected/ca914a04-6a6f-4b20-af32-e0771a7dffa5-kube-api-access-sjlf5\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.547181 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.549926 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643177 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c4v7\" (UniqueName: \"kubernetes.io/projected/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-kube-api-access-9c4v7\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643228 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643252 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-config-data\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643302 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643343 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643362 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-logs\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643384 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8a23085-d070-48b5-8253-5c991de8bd53-logs\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643418 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " 
pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643453 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-config-data\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643512 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-config-data\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643543 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca914a04-6a6f-4b20-af32-e0771a7dffa5-logs\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643591 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjlf5\" (UniqueName: \"kubernetes.io/projected/ca914a04-6a6f-4b20-af32-e0771a7dffa5-kube-api-access-sjlf5\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643640 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.643729 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lc2q\" (UniqueName: \"kubernetes.io/projected/b8a23085-d070-48b5-8253-5c991de8bd53-kube-api-access-7lc2q\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.645570 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca914a04-6a6f-4b20-af32-e0771a7dffa5-logs\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.650089 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.661284 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-config-data\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.664967 4652 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-sjlf5\" (UniqueName: \"kubernetes.io/projected/ca914a04-6a6f-4b20-af32-e0771a7dffa5-kube-api-access-sjlf5\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.673482 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.713766 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.745669 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.746675 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.746845 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-logs\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.746919 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8a23085-d070-48b5-8253-5c991de8bd53-logs\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.747064 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-config-data\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.747741 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8a23085-d070-48b5-8253-5c991de8bd53-logs\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.747787 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.747983 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lc2q\" (UniqueName: 
\"kubernetes.io/projected/b8a23085-d070-48b5-8253-5c991de8bd53-kube-api-access-7lc2q\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.748058 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c4v7\" (UniqueName: \"kubernetes.io/projected/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-kube-api-access-9c4v7\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.748124 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-config-data\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.747481 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-logs\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.752873 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.760934 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.767609 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-config-data\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.770077 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.789295 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-config-data\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.789399 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c4v7\" (UniqueName: \"kubernetes.io/projected/e7ae16a1-df00-4d9b-bd3e-16d2d81946d4-kube-api-access-9c4v7\") pod \"watcher-applier-0\" (UID: \"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4\") " pod="openstack/watcher-applier-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.789751 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7lc2q\" (UniqueName: \"kubernetes.io/projected/b8a23085-d070-48b5-8253-5c991de8bd53-kube-api-access-7lc2q\") pod \"watcher-api-0\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " pod="openstack/watcher-api-0" Dec 05 05:44:04 crc kubenswrapper[4652]: I1205 05:44:04.871335 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Dec 05 05:44:05 crc kubenswrapper[4652]: I1205 05:44:05.074227 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Dec 05 05:44:05 crc kubenswrapper[4652]: I1205 05:44:05.304343 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:44:05 crc kubenswrapper[4652]: I1205 05:44:05.380571 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:05 crc kubenswrapper[4652]: W1205 05:44:05.392009 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8a23085_d070_48b5_8253_5c991de8bd53.slice/crio-37bc889e8ea032b5c2a31d16d139d4bce29eb3b7073796459529947dc2ffcc39 WatchSource:0}: Error finding container 37bc889e8ea032b5c2a31d16d139d4bce29eb3b7073796459529947dc2ffcc39: Status 404 returned error can't find the container with id 37bc889e8ea032b5c2a31d16d139d4bce29eb3b7073796459529947dc2ffcc39 Dec 05 05:44:05 crc kubenswrapper[4652]: I1205 05:44:05.582426 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Dec 05 05:44:05 crc kubenswrapper[4652]: W1205 05:44:05.766369 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7ae16a1_df00_4d9b_bd3e_16d2d81946d4.slice/crio-1b0b3a7ef031fa83f0c79bb0af94edb7b7750dde762d20588ca34ea5c845f105 WatchSource:0}: Error finding container 1b0b3a7ef031fa83f0c79bb0af94edb7b7750dde762d20588ca34ea5c845f105: Status 404 returned error can't find the container with id 1b0b3a7ef031fa83f0c79bb0af94edb7b7750dde762d20588ca34ea5c845f105 Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.146753 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b20e4fb1-1fe5-4264-b344-258ca4ae42ae" path="/var/lib/kubelet/pods/b20e4fb1-1fe5-4264-b344-258ca4ae42ae/volumes" Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.253053 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerStarted","Data":"f52fa9fa118a728aa94d83e2dcd0e592d628993e3ef1b5cfa0bb2c69dd697693"} Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.255544 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4","Type":"ContainerStarted","Data":"1b0b3a7ef031fa83f0c79bb0af94edb7b7750dde762d20588ca34ea5c845f105"} Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.262197 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b8a23085-d070-48b5-8253-5c991de8bd53","Type":"ContainerStarted","Data":"e6096da6072b719b80a442ed9d322c33a010864f8440e07b5c390846f064c1cd"} Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.262347 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" 
event={"ID":"b8a23085-d070-48b5-8253-5c991de8bd53","Type":"ContainerStarted","Data":"39bdbf32fd4d60e1fb3cca4f805e449046dfb320cf38e6ab856d29891836c2ac"} Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.262470 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b8a23085-d070-48b5-8253-5c991de8bd53","Type":"ContainerStarted","Data":"37bc889e8ea032b5c2a31d16d139d4bce29eb3b7073796459529947dc2ffcc39"} Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.263938 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.282735 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": dial tcp 10.217.0.158:9322: connect: connection refused" Dec 05 05:44:06 crc kubenswrapper[4652]: I1205 05:44:06.292750 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=2.292734568 podStartE2EDuration="2.292734568s" podCreationTimestamp="2025-12-05 05:44:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:06.286899086 +0000 UTC m=+1048.523629353" watchObservedRunningTime="2025-12-05 05:44:06.292734568 +0000 UTC m=+1048.529464824" Dec 05 05:44:07 crc kubenswrapper[4652]: I1205 05:44:07.272210 4652 generic.go:334] "Generic (PLEG): container finished" podID="9aaadf80-3d37-46ac-8259-6d50a6de1053" containerID="4b405aa28f93563e533b354028f7c907c7631e66ce2ac3589c09dc152d3b6357" exitCode=0 Dec 05 05:44:07 crc kubenswrapper[4652]: I1205 05:44:07.272305 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gqz4t" event={"ID":"9aaadf80-3d37-46ac-8259-6d50a6de1053","Type":"ContainerDied","Data":"4b405aa28f93563e533b354028f7c907c7631e66ce2ac3589c09dc152d3b6357"} Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.380946 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.640369 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7d8787bf9c-pxslp"] Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.692070 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7744f5c9d6-t75mf"] Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.698965 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.703511 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.703775 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7744f5c9d6-t75mf"] Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.775067 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84bb48cbc7-nw55m"] Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.799814 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-config-data\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.799887 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e04a533-fea2-4fde-a50b-5852129fa912-logs\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.799948 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-combined-ca-bundle\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.799967 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5l58\" (UniqueName: \"kubernetes.io/projected/4e04a533-fea2-4fde-a50b-5852129fa912-kube-api-access-s5l58\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.800125 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-secret-key\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.800183 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-tls-certs\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.800324 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-scripts\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.841681 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-c77c7b944-twjsn"] Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.844111 
4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.865586 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c77c7b944-twjsn"] Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.871858 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.906865 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-scripts\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.906987 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-config-data\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.907037 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e04a533-fea2-4fde-a50b-5852129fa912-logs\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.907083 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-combined-ca-bundle\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.907106 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5l58\" (UniqueName: \"kubernetes.io/projected/4e04a533-fea2-4fde-a50b-5852129fa912-kube-api-access-s5l58\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.907224 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-secret-key\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.907265 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-tls-certs\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.908989 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e04a533-fea2-4fde-a50b-5852129fa912-logs\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.911879 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-config-data\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.922422 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5l58\" (UniqueName: \"kubernetes.io/projected/4e04a533-fea2-4fde-a50b-5852129fa912-kube-api-access-s5l58\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.923937 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-scripts\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.925057 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-combined-ca-bundle\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.929699 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-tls-certs\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: I1205 05:44:09.931485 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-secret-key\") pod \"horizon-7744f5c9d6-t75mf\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") " pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:09 crc kubenswrapper[4652]: E1205 05:44:09.968120 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330ff405_1db6_4136_b17b_679168d3125b.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.010713 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-combined-ca-bundle\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.010871 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-horizon-tls-certs\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.011184 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/a1465128-fcb6-49f8-8879-96e87d51b967-config-data\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.011257 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9ghv\" (UniqueName: \"kubernetes.io/projected/a1465128-fcb6-49f8-8879-96e87d51b967-kube-api-access-q9ghv\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.011336 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-horizon-secret-key\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.011611 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1465128-fcb6-49f8-8879-96e87d51b967-scripts\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.011796 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1465128-fcb6-49f8-8879-96e87d51b967-logs\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.018884 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.114567 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1465128-fcb6-49f8-8879-96e87d51b967-config-data\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.114631 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9ghv\" (UniqueName: \"kubernetes.io/projected/a1465128-fcb6-49f8-8879-96e87d51b967-kube-api-access-q9ghv\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.114687 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-horizon-secret-key\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.114814 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1465128-fcb6-49f8-8879-96e87d51b967-scripts\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.114840 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1465128-fcb6-49f8-8879-96e87d51b967-logs\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.114883 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-combined-ca-bundle\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.114922 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-horizon-tls-certs\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.115770 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1465128-fcb6-49f8-8879-96e87d51b967-logs\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.116233 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a1465128-fcb6-49f8-8879-96e87d51b967-scripts\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.118139 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/a1465128-fcb6-49f8-8879-96e87d51b967-config-data\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.118783 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-horizon-tls-certs\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.121430 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-horizon-secret-key\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.131733 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9ghv\" (UniqueName: \"kubernetes.io/projected/a1465128-fcb6-49f8-8879-96e87d51b967-kube-api-access-q9ghv\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.137416 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1465128-fcb6-49f8-8879-96e87d51b967-combined-ca-bundle\") pod \"horizon-c77c7b944-twjsn\" (UID: \"a1465128-fcb6-49f8-8879-96e87d51b967\") " pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.280176 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:10 crc kubenswrapper[4652]: I1205 05:44:10.961789 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.016219 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-frd2k"] Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.016538 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" podUID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerName="dnsmasq-dns" containerID="cri-o://b5a31b91879b3d21f74f2d2dacf5daed3d54cafc6490c1747146e34c5f65fb0e" gracePeriod=10 Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.335814 4652 generic.go:334] "Generic (PLEG): container finished" podID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerID="b5a31b91879b3d21f74f2d2dacf5daed3d54cafc6490c1747146e34c5f65fb0e" exitCode=0 Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.336205 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" event={"ID":"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1","Type":"ContainerDied","Data":"b5a31b91879b3d21f74f2d2dacf5daed3d54cafc6490c1747146e34c5f65fb0e"} Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.462180 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.551272 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-fernet-keys\") pod \"9aaadf80-3d37-46ac-8259-6d50a6de1053\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.551350 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-combined-ca-bundle\") pod \"9aaadf80-3d37-46ac-8259-6d50a6de1053\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.551377 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-config-data\") pod \"9aaadf80-3d37-46ac-8259-6d50a6de1053\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.551503 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-scripts\") pod \"9aaadf80-3d37-46ac-8259-6d50a6de1053\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.551535 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbkdd\" (UniqueName: \"kubernetes.io/projected/9aaadf80-3d37-46ac-8259-6d50a6de1053-kube-api-access-fbkdd\") pod \"9aaadf80-3d37-46ac-8259-6d50a6de1053\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.551579 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-credential-keys\") pod \"9aaadf80-3d37-46ac-8259-6d50a6de1053\" (UID: \"9aaadf80-3d37-46ac-8259-6d50a6de1053\") " Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.564419 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aaadf80-3d37-46ac-8259-6d50a6de1053-kube-api-access-fbkdd" (OuterVolumeSpecName: "kube-api-access-fbkdd") pod "9aaadf80-3d37-46ac-8259-6d50a6de1053" (UID: "9aaadf80-3d37-46ac-8259-6d50a6de1053"). InnerVolumeSpecName "kube-api-access-fbkdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.575082 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9aaadf80-3d37-46ac-8259-6d50a6de1053" (UID: "9aaadf80-3d37-46ac-8259-6d50a6de1053"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.589047 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-scripts" (OuterVolumeSpecName: "scripts") pod "9aaadf80-3d37-46ac-8259-6d50a6de1053" (UID: "9aaadf80-3d37-46ac-8259-6d50a6de1053"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.595835 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9aaadf80-3d37-46ac-8259-6d50a6de1053" (UID: "9aaadf80-3d37-46ac-8259-6d50a6de1053"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.640288 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-config-data" (OuterVolumeSpecName: "config-data") pod "9aaadf80-3d37-46ac-8259-6d50a6de1053" (UID: "9aaadf80-3d37-46ac-8259-6d50a6de1053"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.655541 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.655723 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbkdd\" (UniqueName: \"kubernetes.io/projected/9aaadf80-3d37-46ac-8259-6d50a6de1053-kube-api-access-fbkdd\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.655738 4652 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.655748 4652 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.655759 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.665430 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9aaadf80-3d37-46ac-8259-6d50a6de1053" (UID: "9aaadf80-3d37-46ac-8259-6d50a6de1053"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:11 crc kubenswrapper[4652]: I1205 05:44:11.760449 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9aaadf80-3d37-46ac-8259-6d50a6de1053-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.354597 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gqz4t" event={"ID":"9aaadf80-3d37-46ac-8259-6d50a6de1053","Type":"ContainerDied","Data":"e9cda0951f93e161a73d880db0d0332f91cd455c4a2f31bc388e72cdf32a5c4d"} Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.355132 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9cda0951f93e161a73d880db0d0332f91cd455c4a2f31bc388e72cdf32a5c4d" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.354648 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gqz4t" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.641184 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-gqz4t"] Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.657888 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-gqz4t"] Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.736086 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-ghfhc"] Dec 05 05:44:12 crc kubenswrapper[4652]: E1205 05:44:12.736677 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aaadf80-3d37-46ac-8259-6d50a6de1053" containerName="keystone-bootstrap" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.736741 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aaadf80-3d37-46ac-8259-6d50a6de1053" containerName="keystone-bootstrap" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.737953 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aaadf80-3d37-46ac-8259-6d50a6de1053" containerName="keystone-bootstrap" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.738739 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.741965 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.742230 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.742359 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.742479 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.742684 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-h2hbv" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.744598 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ghfhc"] Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.884380 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsqxv\" (UniqueName: \"kubernetes.io/projected/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-kube-api-access-bsqxv\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.884430 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-credential-keys\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.884573 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-scripts\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.884799 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-combined-ca-bundle\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.884954 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-config-data\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.885092 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-fernet-keys\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.986461 4652 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-fernet-keys\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.986651 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsqxv\" (UniqueName: \"kubernetes.io/projected/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-kube-api-access-bsqxv\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.986770 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-credential-keys\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.986878 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-scripts\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.987034 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-combined-ca-bundle\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.987154 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-config-data\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.992401 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-credential-keys\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.992628 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-fernet-keys\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.992950 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-combined-ca-bundle\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.993180 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-scripts\") pod \"keystone-bootstrap-ghfhc\" (UID: 
\"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:12 crc kubenswrapper[4652]: I1205 05:44:12.993934 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-config-data\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:13 crc kubenswrapper[4652]: I1205 05:44:13.005489 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsqxv\" (UniqueName: \"kubernetes.io/projected/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-kube-api-access-bsqxv\") pod \"keystone-bootstrap-ghfhc\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:13 crc kubenswrapper[4652]: I1205 05:44:13.058581 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:14 crc kubenswrapper[4652]: I1205 05:44:14.134876 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aaadf80-3d37-46ac-8259-6d50a6de1053" path="/var/lib/kubelet/pods/9aaadf80-3d37-46ac-8259-6d50a6de1053/volumes" Dec 05 05:44:14 crc kubenswrapper[4652]: I1205 05:44:14.372810 4652 generic.go:334] "Generic (PLEG): container finished" podID="298ca280-89e1-42fc-8d46-b63f6588896f" containerID="cc9549f1668c5d9c63a3650a583b150712103f246970d896899fbb3c64173558" exitCode=0 Dec 05 05:44:14 crc kubenswrapper[4652]: I1205 05:44:14.372866 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pvs56" event={"ID":"298ca280-89e1-42fc-8d46-b63f6588896f","Type":"ContainerDied","Data":"cc9549f1668c5d9c63a3650a583b150712103f246970d896899fbb3c64173558"} Dec 05 05:44:14 crc kubenswrapper[4652]: I1205 05:44:14.871960 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 05 05:44:14 crc kubenswrapper[4652]: I1205 05:44:14.877997 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 05 05:44:15 crc kubenswrapper[4652]: I1205 05:44:15.395714 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.287774 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.399889 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.400524 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" event={"ID":"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1","Type":"ContainerDied","Data":"8008ec6549b9e647282b5035ce3ba8ead51c170eb11d246989ec63b017e94efc"} Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.400570 4652 scope.go:117] "RemoveContainer" containerID="b5a31b91879b3d21f74f2d2dacf5daed3d54cafc6490c1747146e34c5f65fb0e" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.464504 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-nb\") pod \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.464576 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwnj9\" (UniqueName: \"kubernetes.io/projected/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-kube-api-access-pwnj9\") pod \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.464634 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-svc\") pod \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.464657 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-sb\") pod \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.464762 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-swift-storage-0\") pod \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.464850 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-config\") pod \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\" (UID: \"d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1\") " Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.470865 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-kube-api-access-pwnj9" (OuterVolumeSpecName: "kube-api-access-pwnj9") pod "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" (UID: "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1"). InnerVolumeSpecName "kube-api-access-pwnj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.507105 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-config" (OuterVolumeSpecName: "config") pod "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" (UID: "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.511851 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" (UID: "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.514644 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" (UID: "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.519404 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" (UID: "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.524389 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" (UID: "d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.567140 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.567170 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.567183 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwnj9\" (UniqueName: \"kubernetes.io/projected/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-kube-api-access-pwnj9\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.567209 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.567218 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.567227 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.733683 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-55b99bf79c-frd2k"] Dec 05 05:44:16 crc kubenswrapper[4652]: I1205 05:44:16.747782 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55b99bf79c-frd2k"] Dec 05 05:44:17 crc kubenswrapper[4652]: I1205 05:44:17.416953 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-55b99bf79c-frd2k" podUID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: i/o timeout" Dec 05 05:44:18 crc kubenswrapper[4652]: I1205 05:44:18.136521 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" path="/var/lib/kubelet/pods/d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1/volumes" Dec 05 05:44:18 crc kubenswrapper[4652]: I1205 05:44:18.198723 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:18 crc kubenswrapper[4652]: I1205 05:44:18.199145 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api-log" containerID="cri-o://39bdbf32fd4d60e1fb3cca4f805e449046dfb320cf38e6ab856d29891836c2ac" gracePeriod=30 Dec 05 05:44:18 crc kubenswrapper[4652]: I1205 05:44:18.199202 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" containerID="cri-o://e6096da6072b719b80a442ed9d322c33a010864f8440e07b5c390846f064c1cd" gracePeriod=30 Dec 05 05:44:18 crc kubenswrapper[4652]: I1205 05:44:18.418607 4652 generic.go:334] "Generic (PLEG): container finished" podID="b8a23085-d070-48b5-8253-5c991de8bd53" containerID="39bdbf32fd4d60e1fb3cca4f805e449046dfb320cf38e6ab856d29891836c2ac" exitCode=143 Dec 05 05:44:18 crc kubenswrapper[4652]: I1205 05:44:18.418649 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b8a23085-d070-48b5-8253-5c991de8bd53","Type":"ContainerDied","Data":"39bdbf32fd4d60e1fb3cca4f805e449046dfb320cf38e6ab856d29891836c2ac"} Dec 05 05:44:19 crc kubenswrapper[4652]: I1205 05:44:19.431045 4652 generic.go:334] "Generic (PLEG): container finished" podID="b8a23085-d070-48b5-8253-5c991de8bd53" containerID="e6096da6072b719b80a442ed9d322c33a010864f8440e07b5c390846f064c1cd" exitCode=0 Dec 05 05:44:19 crc kubenswrapper[4652]: I1205 05:44:19.431267 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b8a23085-d070-48b5-8253-5c991de8bd53","Type":"ContainerDied","Data":"e6096da6072b719b80a442ed9d322c33a010864f8440e07b5c390846f064c1cd"} Dec 05 05:44:19 crc kubenswrapper[4652]: I1205 05:44:19.872624 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": dial tcp 10.217.0.158:9322: connect: connection refused" Dec 05 05:44:19 crc kubenswrapper[4652]: I1205 05:44:19.872632 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": dial tcp 10.217.0.158:9322: connect: connection refused" Dec 05 05:44:20 crc kubenswrapper[4652]: E1205 05:44:20.213363 4652 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330ff405_1db6_4136_b17b_679168d3125b.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:44:20 crc kubenswrapper[4652]: I1205 05:44:20.940133 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.074872 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxlzp\" (UniqueName: \"kubernetes.io/projected/298ca280-89e1-42fc-8d46-b63f6588896f-kube-api-access-cxlzp\") pod \"298ca280-89e1-42fc-8d46-b63f6588896f\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.075173 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-combined-ca-bundle\") pod \"298ca280-89e1-42fc-8d46-b63f6588896f\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.075249 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-config\") pod \"298ca280-89e1-42fc-8d46-b63f6588896f\" (UID: \"298ca280-89e1-42fc-8d46-b63f6588896f\") " Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.078317 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/298ca280-89e1-42fc-8d46-b63f6588896f-kube-api-access-cxlzp" (OuterVolumeSpecName: "kube-api-access-cxlzp") pod "298ca280-89e1-42fc-8d46-b63f6588896f" (UID: "298ca280-89e1-42fc-8d46-b63f6588896f"). InnerVolumeSpecName "kube-api-access-cxlzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.095144 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "298ca280-89e1-42fc-8d46-b63f6588896f" (UID: "298ca280-89e1-42fc-8d46-b63f6588896f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.095474 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-config" (OuterVolumeSpecName: "config") pod "298ca280-89e1-42fc-8d46-b63f6588896f" (UID: "298ca280-89e1-42fc-8d46-b63f6588896f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.177619 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.177643 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxlzp\" (UniqueName: \"kubernetes.io/projected/298ca280-89e1-42fc-8d46-b63f6588896f-kube-api-access-cxlzp\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.177654 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/298ca280-89e1-42fc-8d46-b63f6588896f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:21 crc kubenswrapper[4652]: E1205 05:44:21.247163 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current" Dec 05 05:44:21 crc kubenswrapper[4652]: E1205 05:44:21.247217 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current" Dec 05 05:44:21 crc kubenswrapper[4652]: E1205 05:44:21.247323 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pss7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-wqg7s_openstack(3415917b-2730-494b-b474-b1bd9c1d08f5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:44:21 crc kubenswrapper[4652]: E1205 05:44:21.249428 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-wqg7s" podUID="3415917b-2730-494b-b474-b1bd9c1d08f5" Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.445336 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pvs56" event={"ID":"298ca280-89e1-42fc-8d46-b63f6588896f","Type":"ContainerDied","Data":"7157a9487fe7eada5e68e53d7dfcaa38333229795eff27006694d6360a5e0d75"} Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.445503 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7157a9487fe7eada5e68e53d7dfcaa38333229795eff27006694d6360a5e0d75" Dec 05 05:44:21 crc kubenswrapper[4652]: I1205 05:44:21.445618 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pvs56" Dec 05 05:44:21 crc kubenswrapper[4652]: E1205 05:44:21.446803 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-barbican-api:current\\\"\"" pod="openstack/barbican-db-sync-wqg7s" podUID="3415917b-2730-494b-b474-b1bd9c1d08f5" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.162181 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7648c6b969-zswpk"] Dec 05 05:44:22 crc kubenswrapper[4652]: E1205 05:44:22.162752 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="298ca280-89e1-42fc-8d46-b63f6588896f" containerName="neutron-db-sync" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.162765 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="298ca280-89e1-42fc-8d46-b63f6588896f" containerName="neutron-db-sync" Dec 05 05:44:22 crc kubenswrapper[4652]: E1205 05:44:22.162784 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerName="init" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.162790 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerName="init" Dec 05 05:44:22 crc kubenswrapper[4652]: E1205 05:44:22.162798 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerName="dnsmasq-dns" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.162804 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerName="dnsmasq-dns" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.162976 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4fc9323-0ed0-4c26-b25f-c7c61cf07eb1" containerName="dnsmasq-dns" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.162991 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="298ca280-89e1-42fc-8d46-b63f6588896f" containerName="neutron-db-sync" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.163881 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.169545 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7648c6b969-zswpk"] Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.301649 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-svc\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.301787 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-config\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.301860 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-nb\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.301974 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-sb\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.302011 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-swift-storage-0\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.302030 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgrvw\" (UniqueName: \"kubernetes.io/projected/26ad9bc5-b4f1-476b-8d50-d44153670d74-kube-api-access-fgrvw\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.403185 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5f57444c8-9lkq9"] Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.403575 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-swift-storage-0\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.403625 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgrvw\" (UniqueName: \"kubernetes.io/projected/26ad9bc5-b4f1-476b-8d50-d44153670d74-kube-api-access-fgrvw\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: 
\"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.403672 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-svc\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.403767 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-config\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.403850 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-nb\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.403940 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-sb\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.404459 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.404612 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-swift-storage-0\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.404665 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-config\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.404758 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-svc\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.405029 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-nb\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.405074 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-sb\") pod 
\"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.405751 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.405999 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.406122 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.408071 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nsk9t" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.414826 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f57444c8-9lkq9"] Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.444064 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgrvw\" (UniqueName: \"kubernetes.io/projected/26ad9bc5-b4f1-476b-8d50-d44153670d74-kube-api-access-fgrvw\") pod \"dnsmasq-dns-7648c6b969-zswpk\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.495180 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.505064 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-httpd-config\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.505130 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-config\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.505185 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqh5j\" (UniqueName: \"kubernetes.io/projected/e90f4813-fb7c-4375-9e7d-94673381ffae-kube-api-access-wqh5j\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.505420 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-combined-ca-bundle\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.505538 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-ovndb-tls-certs\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: 
I1205 05:44:22.606980 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-combined-ca-bundle\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.607427 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-ovndb-tls-certs\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.607484 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-httpd-config\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.607529 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-config\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.607604 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqh5j\" (UniqueName: \"kubernetes.io/projected/e90f4813-fb7c-4375-9e7d-94673381ffae-kube-api-access-wqh5j\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.610418 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-combined-ca-bundle\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.610533 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-ovndb-tls-certs\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.615903 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-httpd-config\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.626079 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-config\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.630059 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqh5j\" (UniqueName: 
\"kubernetes.io/projected/e90f4813-fb7c-4375-9e7d-94673381ffae-kube-api-access-wqh5j\") pod \"neutron-5f57444c8-9lkq9\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:22 crc kubenswrapper[4652]: I1205 05:44:22.722267 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.576830 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-9f8cfd587-c4hb6"] Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.578447 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.579675 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.580188 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.588659 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9f8cfd587-c4hb6"] Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.750786 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-internal-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.750845 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-public-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.751085 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhd4s\" (UniqueName: \"kubernetes.io/projected/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-kube-api-access-rhd4s\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.751144 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-config\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.751169 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-ovndb-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.751234 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-combined-ca-bundle\") pod \"neutron-9f8cfd587-c4hb6\" (UID: 
\"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.751282 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-httpd-config\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.857171 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhd4s\" (UniqueName: \"kubernetes.io/projected/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-kube-api-access-rhd4s\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.857230 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-config\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.857257 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-ovndb-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.857307 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-combined-ca-bundle\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.857347 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-httpd-config\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.857448 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-internal-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.857466 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-public-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.861793 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-public-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 
05:44:24.861982 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-internal-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.862324 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-httpd-config\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.863697 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-config\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.869284 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-ovndb-tls-certs\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.870092 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-combined-ca-bundle\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.873112 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhd4s\" (UniqueName: \"kubernetes.io/projected/ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef-kube-api-access-rhd4s\") pod \"neutron-9f8cfd587-c4hb6\" (UID: \"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef\") " pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:24 crc kubenswrapper[4652]: I1205 05:44:24.907039 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:27 crc kubenswrapper[4652]: E1205 05:44:27.323357 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-glance-api:current" Dec 05 05:44:27 crc kubenswrapper[4652]: E1205 05:44:27.323704 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-glance-api:current" Dec 05 05:44:27 crc kubenswrapper[4652]: E1205 05:44:27.323816 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-glance-api:current,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sg7pc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-nmg5q_openstack(a1e961f4-2398-4a5e-a424-e8066a6a7c78): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:44:27 crc kubenswrapper[4652]: E1205 05:44:27.325098 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-nmg5q" podUID="a1e961f4-2398-4a5e-a424-e8066a6a7c78" Dec 05 05:44:27 crc kubenswrapper[4652]: E1205 05:44:27.505889 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.rdoproject.org/podified-master-centos10/openstack-glance-api:current\\\"\"" pod="openstack/glance-db-sync-nmg5q" podUID="a1e961f4-2398-4a5e-a424-e8066a6a7c78" Dec 05 05:44:29 crc kubenswrapper[4652]: I1205 05:44:29.872730 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:44:29 crc kubenswrapper[4652]: I1205 05:44:29.873303 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:44:30 crc kubenswrapper[4652]: E1205 05:44:30.406066 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330ff405_1db6_4136_b17b_679168d3125b.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:44:34 crc kubenswrapper[4652]: I1205 05:44:34.150052 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:44:34 crc kubenswrapper[4652]: I1205 05:44:34.150450 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:44:34 crc kubenswrapper[4652]: I1205 05:44:34.874022 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:44:34 crc kubenswrapper[4652]: I1205 05:44:34.874051 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:44:34 crc kubenswrapper[4652]: I1205 05:44:34.874169 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 05:44:34 crc kubenswrapper[4652]: I1205 05:44:34.874211 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 05:44:35 crc kubenswrapper[4652]: I1205 05:44:35.870946 4652 scope.go:117] "RemoveContainer" containerID="4e1436f0144c95ea61b05b17005c147a1a8480993df19268e0bffaf48f4b22a2" Dec 05 05:44:35 crc kubenswrapper[4652]: I1205 05:44:35.932299 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.042499 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lc2q\" (UniqueName: \"kubernetes.io/projected/b8a23085-d070-48b5-8253-5c991de8bd53-kube-api-access-7lc2q\") pod \"b8a23085-d070-48b5-8253-5c991de8bd53\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.042542 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8a23085-d070-48b5-8253-5c991de8bd53-logs\") pod \"b8a23085-d070-48b5-8253-5c991de8bd53\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.042636 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-custom-prometheus-ca\") pod \"b8a23085-d070-48b5-8253-5c991de8bd53\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.042651 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-combined-ca-bundle\") pod \"b8a23085-d070-48b5-8253-5c991de8bd53\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.042734 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-config-data\") pod \"b8a23085-d070-48b5-8253-5c991de8bd53\" (UID: \"b8a23085-d070-48b5-8253-5c991de8bd53\") " Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.044299 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8a23085-d070-48b5-8253-5c991de8bd53-logs" (OuterVolumeSpecName: "logs") pod "b8a23085-d070-48b5-8253-5c991de8bd53" (UID: "b8a23085-d070-48b5-8253-5c991de8bd53"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.048448 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8a23085-d070-48b5-8253-5c991de8bd53-kube-api-access-7lc2q" (OuterVolumeSpecName: "kube-api-access-7lc2q") pod "b8a23085-d070-48b5-8253-5c991de8bd53" (UID: "b8a23085-d070-48b5-8253-5c991de8bd53"). InnerVolumeSpecName "kube-api-access-7lc2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.063036 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "b8a23085-d070-48b5-8253-5c991de8bd53" (UID: "b8a23085-d070-48b5-8253-5c991de8bd53"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.068312 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8a23085-d070-48b5-8253-5c991de8bd53" (UID: "b8a23085-d070-48b5-8253-5c991de8bd53"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.085947 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-config-data" (OuterVolumeSpecName: "config-data") pod "b8a23085-d070-48b5-8253-5c991de8bd53" (UID: "b8a23085-d070-48b5-8253-5c991de8bd53"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.145096 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lc2q\" (UniqueName: \"kubernetes.io/projected/b8a23085-d070-48b5-8253-5c991de8bd53-kube-api-access-7lc2q\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.145117 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8a23085-d070-48b5-8253-5c991de8bd53-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.145127 4652 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.145136 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.145144 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8a23085-d070-48b5-8253-5c991de8bd53-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.215733 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7744f5c9d6-t75mf"] Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.273720 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c77c7b944-twjsn"] Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.571286 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"b8a23085-d070-48b5-8253-5c991de8bd53","Type":"ContainerDied","Data":"37bc889e8ea032b5c2a31d16d139d4bce29eb3b7073796459529947dc2ffcc39"} Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.571342 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.592885 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.603167 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.616008 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:36 crc kubenswrapper[4652]: E1205 05:44:36.616394 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api-log" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.616412 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api-log" Dec 05 05:44:36 crc kubenswrapper[4652]: E1205 05:44:36.616421 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.616426 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.616653 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.616683 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api-log" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.617634 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.619792 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.620173 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.620366 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.623236 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.754643 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-public-tls-certs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.754705 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmvq4\" (UniqueName: \"kubernetes.io/projected/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-kube-api-access-wmvq4\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.754826 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.754994 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.755207 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.755264 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-config-data\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.755317 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-logs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: E1205 05:44:36.802948 4652 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current" Dec 05 05:44:36 crc kubenswrapper[4652]: E1205 05:44:36.802992 4652 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current" Dec 05 05:44:36 crc kubenswrapper[4652]: E1205 05:44:36.803100 4652 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ffkhs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-xcf4m_openstack(266e7065-7af6-4547-b7bb-5e981e095969): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 05:44:36 crc kubenswrapper[4652]: E1205 05:44:36.804257 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-xcf4m" podUID="266e7065-7af6-4547-b7bb-5e981e095969" Dec 05 05:44:36 crc kubenswrapper[4652]: W1205 05:44:36.805807 4652 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e04a533_fea2_4fde_a50b_5852129fa912.slice/crio-66675ee0f6ef1f0335f425f873080723b61535259df2b40a7e2022fbbc77a373 WatchSource:0}: Error finding container 66675ee0f6ef1f0335f425f873080723b61535259df2b40a7e2022fbbc77a373: Status 404 returned error can't find the container with id 66675ee0f6ef1f0335f425f873080723b61535259df2b40a7e2022fbbc77a373 Dec 05 05:44:36 crc kubenswrapper[4652]: W1205 05:44:36.806833 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1465128_fcb6_49f8_8879_96e87d51b967.slice/crio-2199925fe3dc5d3f79751232a61ad7251bc2391ca3263f063b81c41402b54610 WatchSource:0}: Error finding container 2199925fe3dc5d3f79751232a61ad7251bc2391ca3263f063b81c41402b54610: Status 404 returned error can't find the container with id 2199925fe3dc5d3f79751232a61ad7251bc2391ca3263f063b81c41402b54610 Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.819432 4652 scope.go:117] "RemoveContainer" containerID="e6096da6072b719b80a442ed9d322c33a010864f8440e07b5c390846f064c1cd" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.858586 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-logs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.858698 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-public-tls-certs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.858747 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmvq4\" (UniqueName: \"kubernetes.io/projected/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-kube-api-access-wmvq4\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.858794 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.858883 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.859011 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.859054 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-config-data\") pod 
\"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.859161 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-logs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.863048 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-public-tls-certs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.864504 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-config-data\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.865627 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.867430 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.869497 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.873231 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmvq4\" (UniqueName: \"kubernetes.io/projected/bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8-kube-api-access-wmvq4\") pod \"watcher-api-0\" (UID: \"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8\") " pod="openstack/watcher-api-0" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.900749 4652 scope.go:117] "RemoveContainer" containerID="39bdbf32fd4d60e1fb3cca4f805e449046dfb320cf38e6ab856d29891836c2ac" Dec 05 05:44:36 crc kubenswrapper[4652]: I1205 05:44:36.934110 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.194189 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ghfhc"] Dec 05 05:44:37 crc kubenswrapper[4652]: W1205 05:44:37.206756 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c70e1c4_c49c_4cb6_adec_0173ebe53d17.slice/crio-ab21b431b0f732e9363a20d1860566b6725d42db6213fa3db27a0097e2086451 WatchSource:0}: Error finding container ab21b431b0f732e9363a20d1860566b6725d42db6213fa3db27a0097e2086451: Status 404 returned error can't find the container with id ab21b431b0f732e9363a20d1860566b6725d42db6213fa3db27a0097e2086451 Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.269529 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7648c6b969-zswpk"] Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.435409 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.527316 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f57444c8-9lkq9"] Dec 05 05:44:37 crc kubenswrapper[4652]: W1205 05:44:37.540164 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode90f4813_fb7c_4375_9e7d_94673381ffae.slice/crio-88bfdda16b92ce1232617c3993fbdc015866fc86d87db96d8588e9b159185805 WatchSource:0}: Error finding container 88bfdda16b92ce1232617c3993fbdc015866fc86d87db96d8588e9b159185805: Status 404 returned error can't find the container with id 88bfdda16b92ce1232617c3993fbdc015866fc86d87db96d8588e9b159185805 Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.591202 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84bb48cbc7-nw55m" event={"ID":"8b79d3c1-ccd2-454f-979d-67561bb7cfe5","Type":"ContainerStarted","Data":"32da43783283983c795e30d81e4ab738dbe38a370c5717921c968cf9b542a648"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.593228 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c77c7b944-twjsn" event={"ID":"a1465128-fcb6-49f8-8879-96e87d51b967","Type":"ContainerStarted","Data":"db58096afc35f643f02ae32e10ab2ee33e2f31ddcfd738bdddff963319467902"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.593254 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c77c7b944-twjsn" event={"ID":"a1465128-fcb6-49f8-8879-96e87d51b967","Type":"ContainerStarted","Data":"2199925fe3dc5d3f79751232a61ad7251bc2391ca3263f063b81c41402b54610"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.603686 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-wqg7s" event={"ID":"3415917b-2730-494b-b474-b1bd9c1d08f5","Type":"ContainerStarted","Data":"b076ceaec9e7bdc2a7d3bfb34652c27b93881bbef018e710e411b902720ed3ce"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.638923 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-wqg7s" podStartSLOduration=1.960575905 podStartE2EDuration="37.638903638s" podCreationTimestamp="2025-12-05 05:44:00 +0000 UTC" firstStartedPulling="2025-12-05 05:44:01.313317785 +0000 UTC m=+1043.550048051" lastFinishedPulling="2025-12-05 05:44:36.991645517 +0000 UTC m=+1079.228375784" observedRunningTime="2025-12-05 05:44:37.62878502 +0000 UTC m=+1079.865515297" 
watchObservedRunningTime="2025-12-05 05:44:37.638903638 +0000 UTC m=+1079.875633904" Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.657149 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"e7ae16a1-df00-4d9b-bd3e-16d2d81946d4","Type":"ContainerStarted","Data":"5a41dd0a12b096a5233c8a1cd62801561a2756b5dcb740e53669df618f49baee"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.677722 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8","Type":"ContainerStarted","Data":"91a386b1133864de69a6a9cd421b4564eee7b13794aec84e5256b53f0ccd4585"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.690713 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ghfhc" event={"ID":"5c70e1c4-c49c-4cb6-adec-0173ebe53d17","Type":"ContainerStarted","Data":"336a971cfb95c60fa6c21060cd5cd46635c92ff06669ea0717d368571f4883de"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.690754 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ghfhc" event={"ID":"5c70e1c4-c49c-4cb6-adec-0173ebe53d17","Type":"ContainerStarted","Data":"ab21b431b0f732e9363a20d1860566b6725d42db6213fa3db27a0097e2086451"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.691740 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=18.208829795 podStartE2EDuration="33.69172475s" podCreationTimestamp="2025-12-05 05:44:04 +0000 UTC" firstStartedPulling="2025-12-05 05:44:05.770935887 +0000 UTC m=+1048.007666145" lastFinishedPulling="2025-12-05 05:44:21.253830834 +0000 UTC m=+1063.490561100" observedRunningTime="2025-12-05 05:44:37.691428343 +0000 UTC m=+1079.928158610" watchObservedRunningTime="2025-12-05 05:44:37.69172475 +0000 UTC m=+1079.928455017" Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.727583 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-ghfhc" podStartSLOduration=25.727547702 podStartE2EDuration="25.727547702s" podCreationTimestamp="2025-12-05 05:44:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:37.71261907 +0000 UTC m=+1079.949349336" watchObservedRunningTime="2025-12-05 05:44:37.727547702 +0000 UTC m=+1079.964277969" Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.737059 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerStarted","Data":"9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.757767 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7744f5c9d6-t75mf" event={"ID":"4e04a533-fea2-4fde-a50b-5852129fa912","Type":"ContainerStarted","Data":"1a97cb99478e8e1f70f698c3e507c2e8e7e789636755f4bc50bd106c55435730"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.757812 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7744f5c9d6-t75mf" event={"ID":"4e04a533-fea2-4fde-a50b-5852129fa912","Type":"ContainerStarted","Data":"66675ee0f6ef1f0335f425f873080723b61535259df2b40a7e2022fbbc77a373"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.772379 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-5f57444c8-9lkq9" event={"ID":"e90f4813-fb7c-4375-9e7d-94673381ffae","Type":"ContainerStarted","Data":"88bfdda16b92ce1232617c3993fbdc015866fc86d87db96d8588e9b159185805"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.778260 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerStarted","Data":"eabaaa268d144d039a172c70b9f7f90078e7b3ba8da2dd7855954b59161d2790"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.789750 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-mlkhj" event={"ID":"86c35465-1240-412c-9182-99d8ed10f948","Type":"ContainerStarted","Data":"f16e2f4bfc579d4b966a1f43005c195b5a798be9b6f0f93fd05feff323b6a443"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.791359 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" event={"ID":"26ad9bc5-b4f1-476b-8d50-d44153670d74","Type":"ContainerStarted","Data":"fe486f6d3e9d0d799bc946ff1d4ff121ef3fdada650f994be0c2d4f09e9f444a"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.793776 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d8787bf9c-pxslp" event={"ID":"34e6630c-45d1-4105-9ce1-a22701d0231a","Type":"ContainerStarted","Data":"9235f17f27e40459f8bc575435ac465e6ca3d02cad1a4b541f49f4638e4e7925"} Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.810245 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=17.884985475 podStartE2EDuration="33.810228645s" podCreationTimestamp="2025-12-05 05:44:04 +0000 UTC" firstStartedPulling="2025-12-05 05:44:05.328529674 +0000 UTC m=+1047.565259941" lastFinishedPulling="2025-12-05 05:44:21.253772845 +0000 UTC m=+1063.490503111" observedRunningTime="2025-12-05 05:44:37.804056552 +0000 UTC m=+1080.040786818" watchObservedRunningTime="2025-12-05 05:44:37.810228645 +0000 UTC m=+1080.046958913" Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.820001 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6795c4fff7-6rkzk" event={"ID":"b190f7c2-4b39-46ac-a309-fdee7641c525","Type":"ContainerStarted","Data":"2affdee24e868032ec37bbabb80da0d75aa95d821419d6c7949bd48514e8ac22"} Dec 05 05:44:37 crc kubenswrapper[4652]: E1205 05:44:37.833458 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cinder-api:current\\\"\"" pod="openstack/cinder-db-sync-xcf4m" podUID="266e7065-7af6-4547-b7bb-5e981e095969" Dec 05 05:44:37 crc kubenswrapper[4652]: I1205 05:44:37.849417 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-mlkhj" podStartSLOduration=18.132587311 podStartE2EDuration="37.849399386s" podCreationTimestamp="2025-12-05 05:44:00 +0000 UTC" firstStartedPulling="2025-12-05 05:44:01.551873933 +0000 UTC m=+1043.788604200" lastFinishedPulling="2025-12-05 05:44:21.268686008 +0000 UTC m=+1063.505416275" observedRunningTime="2025-12-05 05:44:37.829943299 +0000 UTC m=+1080.066673565" watchObservedRunningTime="2025-12-05 05:44:37.849399386 +0000 UTC m=+1080.086129654" Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.164843 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="b8a23085-d070-48b5-8253-5c991de8bd53" path="/var/lib/kubelet/pods/b8a23085-d070-48b5-8253-5c991de8bd53/volumes" Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.457016 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9f8cfd587-c4hb6"] Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.877477 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8","Type":"ContainerStarted","Data":"25297feea6acdc6d1bc4eb5c3fac54f47becbaa55caa6063e20c65e7440dc498"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.878003 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8","Type":"ContainerStarted","Data":"ab7b1f05adf7ae448279cf5d7116554dfded567daf6ba48d576cf8fd5f2ef731"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.878593 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.893548 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6795c4fff7-6rkzk" podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerName="horizon-log" containerID="cri-o://2affdee24e868032ec37bbabb80da0d75aa95d821419d6c7949bd48514e8ac22" gracePeriod=30 Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.893887 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6795c4fff7-6rkzk" event={"ID":"b190f7c2-4b39-46ac-a309-fdee7641c525","Type":"ContainerStarted","Data":"1701b339c29bc1d513460590c57bd08dc0006707afedcd971813754b44af2018"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.894031 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6795c4fff7-6rkzk" podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerName="horizon" containerID="cri-o://1701b339c29bc1d513460590c57bd08dc0006707afedcd971813754b44af2018" gracePeriod=30 Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.909624 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=2.909602119 podStartE2EDuration="2.909602119s" podCreationTimestamp="2025-12-05 05:44:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:38.904169343 +0000 UTC m=+1081.140899610" watchObservedRunningTime="2025-12-05 05:44:38.909602119 +0000 UTC m=+1081.146332385" Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.919396 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f57444c8-9lkq9" event={"ID":"e90f4813-fb7c-4375-9e7d-94673381ffae","Type":"ContainerStarted","Data":"df2d7400d42d396963dfe9abe80aa7a96d39cd8cefed0945dfa7b52a3fba2906"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.919446 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f57444c8-9lkq9" event={"ID":"e90f4813-fb7c-4375-9e7d-94673381ffae","Type":"ContainerStarted","Data":"33d9447d060975f3b4a6598751b46ebaf05b54df8ff59a1f14204e7f2c5ca06e"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.920633 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.935226 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/horizon-6795c4fff7-6rkzk" podStartSLOduration=3.6412807689999998 podStartE2EDuration="38.935211786s" podCreationTimestamp="2025-12-05 05:44:00 +0000 UTC" firstStartedPulling="2025-12-05 05:44:01.577701048 +0000 UTC m=+1043.814431306" lastFinishedPulling="2025-12-05 05:44:36.871632056 +0000 UTC m=+1079.108362323" observedRunningTime="2025-12-05 05:44:38.922021939 +0000 UTC m=+1081.158752206" watchObservedRunningTime="2025-12-05 05:44:38.935211786 +0000 UTC m=+1081.171942052" Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.935761 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d8787bf9c-pxslp" event={"ID":"34e6630c-45d1-4105-9ce1-a22701d0231a","Type":"ContainerStarted","Data":"81a38f6a597eb65f06ee0d1a27cf5ace01addd802c4ccca86938fd574bc77362"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.936008 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7d8787bf9c-pxslp" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerName="horizon-log" containerID="cri-o://9235f17f27e40459f8bc575435ac465e6ca3d02cad1a4b541f49f4638e4e7925" gracePeriod=30 Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.936174 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7d8787bf9c-pxslp" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerName="horizon" containerID="cri-o://81a38f6a597eb65f06ee0d1a27cf5ace01addd802c4ccca86938fd574bc77362" gracePeriod=30 Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.951867 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5f57444c8-9lkq9" podStartSLOduration=16.951856441 podStartE2EDuration="16.951856441s" podCreationTimestamp="2025-12-05 05:44:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:38.944431585 +0000 UTC m=+1081.181161851" watchObservedRunningTime="2025-12-05 05:44:38.951856441 +0000 UTC m=+1081.188586709" Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.954704 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9f8cfd587-c4hb6" event={"ID":"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef","Type":"ContainerStarted","Data":"ada696f1c2c1ff226fec5c395290c7d3683e3c057b84ccd36879bc7972b2b4c1"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.954755 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9f8cfd587-c4hb6" event={"ID":"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef","Type":"ContainerStarted","Data":"d6da558c6041e17b768fbecd6b2f484b04d105e9624022da98c915fd7474f731"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.958396 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7744f5c9d6-t75mf" event={"ID":"4e04a533-fea2-4fde-a50b-5852129fa912","Type":"ContainerStarted","Data":"7c9d3fec70cacda0b535157a56f5d7fa1a3c1cedf136c200c7a49e146e79f017"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.971517 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84bb48cbc7-nw55m" event={"ID":"8b79d3c1-ccd2-454f-979d-67561bb7cfe5","Type":"ContainerStarted","Data":"0d281e2fb4275d320342055350ad8b02619345a27b14e142b0644ec0fbef89cc"} Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.971717 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-84bb48cbc7-nw55m" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" 
containerName="horizon" containerID="cri-o://0d281e2fb4275d320342055350ad8b02619345a27b14e142b0644ec0fbef89cc" gracePeriod=30 Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.971709 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-84bb48cbc7-nw55m" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerName="horizon-log" containerID="cri-o://32da43783283983c795e30d81e4ab738dbe38a370c5717921c968cf9b542a648" gracePeriod=30 Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.974141 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7d8787bf9c-pxslp" podStartSLOduration=3.415965802 podStartE2EDuration="38.974131324s" podCreationTimestamp="2025-12-05 05:44:00 +0000 UTC" firstStartedPulling="2025-12-05 05:44:01.312876826 +0000 UTC m=+1043.549607093" lastFinishedPulling="2025-12-05 05:44:36.871042348 +0000 UTC m=+1079.107772615" observedRunningTime="2025-12-05 05:44:38.967208982 +0000 UTC m=+1081.203939249" watchObservedRunningTime="2025-12-05 05:44:38.974131324 +0000 UTC m=+1081.210861591" Dec 05 05:44:38 crc kubenswrapper[4652]: I1205 05:44:38.983160 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c77c7b944-twjsn" event={"ID":"a1465128-fcb6-49f8-8879-96e87d51b967","Type":"ContainerStarted","Data":"97ef3f108d909c774797ce1f3ca0f9653e7349d952e0fda5c5a807063cceb436"} Dec 05 05:44:39 crc kubenswrapper[4652]: I1205 05:44:38.994532 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7744f5c9d6-t75mf" podStartSLOduration=29.994520565 podStartE2EDuration="29.994520565s" podCreationTimestamp="2025-12-05 05:44:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:38.983401659 +0000 UTC m=+1081.220131926" watchObservedRunningTime="2025-12-05 05:44:38.994520565 +0000 UTC m=+1081.231250832" Dec 05 05:44:39 crc kubenswrapper[4652]: I1205 05:44:39.005070 4652 generic.go:334] "Generic (PLEG): container finished" podID="26ad9bc5-b4f1-476b-8d50-d44153670d74" containerID="e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511" exitCode=0 Dec 05 05:44:39 crc kubenswrapper[4652]: I1205 05:44:39.006047 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" event={"ID":"26ad9bc5-b4f1-476b-8d50-d44153670d74","Type":"ContainerDied","Data":"e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511"} Dec 05 05:44:39 crc kubenswrapper[4652]: I1205 05:44:39.020143 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-c77c7b944-twjsn" podStartSLOduration=30.020132245 podStartE2EDuration="30.020132245s" podCreationTimestamp="2025-12-05 05:44:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:39.016632622 +0000 UTC m=+1081.253362889" watchObservedRunningTime="2025-12-05 05:44:39.020132245 +0000 UTC m=+1081.256862512" Dec 05 05:44:39 crc kubenswrapper[4652]: I1205 05:44:39.115648 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-84bb48cbc7-nw55m" podStartSLOduration=3.460038809 podStartE2EDuration="37.115624995s" podCreationTimestamp="2025-12-05 05:44:02 +0000 UTC" firstStartedPulling="2025-12-05 05:44:03.168225822 +0000 UTC m=+1045.404956089" lastFinishedPulling="2025-12-05 05:44:36.823812008 +0000 
UTC m=+1079.060542275" observedRunningTime="2025-12-05 05:44:39.040018191 +0000 UTC m=+1081.276748458" watchObservedRunningTime="2025-12-05 05:44:39.115624995 +0000 UTC m=+1081.352355262" Dec 05 05:44:39 crc kubenswrapper[4652]: I1205 05:44:39.874450 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:44:39 crc kubenswrapper[4652]: I1205 05:44:39.876745 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="b8a23085-d070-48b5-8253-5c991de8bd53" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9322/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:44:40 crc kubenswrapper[4652]: I1205 05:44:40.019993 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:40 crc kubenswrapper[4652]: I1205 05:44:40.020032 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:44:40 crc kubenswrapper[4652]: I1205 05:44:40.074724 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Dec 05 05:44:40 crc kubenswrapper[4652]: I1205 05:44:40.281202 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:40 crc kubenswrapper[4652]: I1205 05:44:40.281492 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:44:40 crc kubenswrapper[4652]: E1205 05:44:40.607701 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3415917b_2730_494b_b474_b1bd9c1d08f5.slice/crio-conmon-b076ceaec9e7bdc2a7d3bfb34652c27b93881bbef018e710e411b902720ed3ce.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3415917b_2730_494b_b474_b1bd9c1d08f5.slice/crio-b076ceaec9e7bdc2a7d3bfb34652c27b93881bbef018e710e411b902720ed3ce.scope\": RecentStats: unable to find data in memory cache]" Dec 05 05:44:40 crc kubenswrapper[4652]: I1205 05:44:40.691340 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7d8787bf9c-pxslp" Dec 05 05:44:40 crc kubenswrapper[4652]: I1205 05:44:40.738197 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6795c4fff7-6rkzk" Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.047027 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerStarted","Data":"6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58"} Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.048649 4652 generic.go:334] "Generic (PLEG): container finished" podID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerID="eabaaa268d144d039a172c70b9f7f90078e7b3ba8da2dd7855954b59161d2790" exitCode=1 Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.048741 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" 
event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerDied","Data":"eabaaa268d144d039a172c70b9f7f90078e7b3ba8da2dd7855954b59161d2790"} Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.049666 4652 scope.go:117] "RemoveContainer" containerID="eabaaa268d144d039a172c70b9f7f90078e7b3ba8da2dd7855954b59161d2790" Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.051284 4652 generic.go:334] "Generic (PLEG): container finished" podID="3415917b-2730-494b-b474-b1bd9c1d08f5" containerID="b076ceaec9e7bdc2a7d3bfb34652c27b93881bbef018e710e411b902720ed3ce" exitCode=0 Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.051344 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-wqg7s" event={"ID":"3415917b-2730-494b-b474-b1bd9c1d08f5","Type":"ContainerDied","Data":"b076ceaec9e7bdc2a7d3bfb34652c27b93881bbef018e710e411b902720ed3ce"} Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.052697 4652 generic.go:334] "Generic (PLEG): container finished" podID="86c35465-1240-412c-9182-99d8ed10f948" containerID="f16e2f4bfc579d4b966a1f43005c195b5a798be9b6f0f93fd05feff323b6a443" exitCode=0 Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.052754 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-mlkhj" event={"ID":"86c35465-1240-412c-9182-99d8ed10f948","Type":"ContainerDied","Data":"f16e2f4bfc579d4b966a1f43005c195b5a798be9b6f0f93fd05feff323b6a443"} Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.054665 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" event={"ID":"26ad9bc5-b4f1-476b-8d50-d44153670d74","Type":"ContainerStarted","Data":"98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8"} Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.054999 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.056468 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9f8cfd587-c4hb6" event={"ID":"ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef","Type":"ContainerStarted","Data":"17bb63829e7300cb94823ec9a85a4bf579142fec3f089f5ab8c556ad2ac00c4d"} Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.056844 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.134728 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.148291 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" podStartSLOduration=19.148273472 podStartE2EDuration="19.148273472s" podCreationTimestamp="2025-12-05 05:44:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:41.127997805 +0000 UTC m=+1083.364728072" watchObservedRunningTime="2025-12-05 05:44:41.148273472 +0000 UTC m=+1083.385003739" Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.153796 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-9f8cfd587-c4hb6" podStartSLOduration=17.153781638 podStartE2EDuration="17.153781638s" podCreationTimestamp="2025-12-05 05:44:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:41.14300759 +0000 UTC m=+1083.379737857" watchObservedRunningTime="2025-12-05 05:44:41.153781638 +0000 UTC m=+1083.390511906" Dec 05 05:44:41 crc kubenswrapper[4652]: I1205 05:44:41.934886 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.074625 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerStarted","Data":"45845c4c4d40fa1001367e8f6d74e8f4fc8cddaa1c3feae8f78472a5ef8c3442"} Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.079132 4652 generic.go:334] "Generic (PLEG): container finished" podID="5c70e1c4-c49c-4cb6-adec-0173ebe53d17" containerID="336a971cfb95c60fa6c21060cd5cd46635c92ff06669ea0717d368571f4883de" exitCode=0 Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.079205 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ghfhc" event={"ID":"5c70e1c4-c49c-4cb6-adec-0173ebe53d17","Type":"ContainerDied","Data":"336a971cfb95c60fa6c21060cd5cd46635c92ff06669ea0717d368571f4883de"} Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.512497 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-wqg7s" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.659605 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-mlkhj" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.709462 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pss7l\" (UniqueName: \"kubernetes.io/projected/3415917b-2730-494b-b474-b1bd9c1d08f5-kube-api-access-pss7l\") pod \"3415917b-2730-494b-b474-b1bd9c1d08f5\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.709541 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-db-sync-config-data\") pod \"3415917b-2730-494b-b474-b1bd9c1d08f5\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.709949 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-combined-ca-bundle\") pod \"3415917b-2730-494b-b474-b1bd9c1d08f5\" (UID: \"3415917b-2730-494b-b474-b1bd9c1d08f5\") " Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.727765 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3415917b-2730-494b-b474-b1bd9c1d08f5-kube-api-access-pss7l" (OuterVolumeSpecName: "kube-api-access-pss7l") pod "3415917b-2730-494b-b474-b1bd9c1d08f5" (UID: "3415917b-2730-494b-b474-b1bd9c1d08f5"). InnerVolumeSpecName "kube-api-access-pss7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.729233 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3415917b-2730-494b-b474-b1bd9c1d08f5" (UID: "3415917b-2730-494b-b474-b1bd9c1d08f5"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.734676 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-84bb48cbc7-nw55m" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.772650 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3415917b-2730-494b-b474-b1bd9c1d08f5" (UID: "3415917b-2730-494b-b474-b1bd9c1d08f5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.812604 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-combined-ca-bundle\") pod \"86c35465-1240-412c-9182-99d8ed10f948\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.812775 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjhbq\" (UniqueName: \"kubernetes.io/projected/86c35465-1240-412c-9182-99d8ed10f948-kube-api-access-qjhbq\") pod \"86c35465-1240-412c-9182-99d8ed10f948\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.813039 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-config-data\") pod \"86c35465-1240-412c-9182-99d8ed10f948\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.813081 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86c35465-1240-412c-9182-99d8ed10f948-logs\") pod \"86c35465-1240-412c-9182-99d8ed10f948\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.813109 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-scripts\") pod \"86c35465-1240-412c-9182-99d8ed10f948\" (UID: \"86c35465-1240-412c-9182-99d8ed10f948\") " Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.814024 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.814047 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pss7l\" (UniqueName: \"kubernetes.io/projected/3415917b-2730-494b-b474-b1bd9c1d08f5-kube-api-access-pss7l\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.814059 4652 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3415917b-2730-494b-b474-b1bd9c1d08f5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.815823 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86c35465-1240-412c-9182-99d8ed10f948-logs" (OuterVolumeSpecName: "logs") pod "86c35465-1240-412c-9182-99d8ed10f948" (UID: 
"86c35465-1240-412c-9182-99d8ed10f948"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.817369 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-scripts" (OuterVolumeSpecName: "scripts") pod "86c35465-1240-412c-9182-99d8ed10f948" (UID: "86c35465-1240-412c-9182-99d8ed10f948"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.819694 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86c35465-1240-412c-9182-99d8ed10f948-kube-api-access-qjhbq" (OuterVolumeSpecName: "kube-api-access-qjhbq") pod "86c35465-1240-412c-9182-99d8ed10f948" (UID: "86c35465-1240-412c-9182-99d8ed10f948"). InnerVolumeSpecName "kube-api-access-qjhbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.847776 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-config-data" (OuterVolumeSpecName: "config-data") pod "86c35465-1240-412c-9182-99d8ed10f948" (UID: "86c35465-1240-412c-9182-99d8ed10f948"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.873703 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86c35465-1240-412c-9182-99d8ed10f948" (UID: "86c35465-1240-412c-9182-99d8ed10f948"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.916138 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.916165 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjhbq\" (UniqueName: \"kubernetes.io/projected/86c35465-1240-412c-9182-99d8ed10f948-kube-api-access-qjhbq\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.916179 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.916186 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86c35465-1240-412c-9182-99d8ed10f948-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:42 crc kubenswrapper[4652]: I1205 05:44:42.916194 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86c35465-1240-412c-9182-99d8ed10f948-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.103957 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-wqg7s" event={"ID":"3415917b-2730-494b-b474-b1bd9c1d08f5","Type":"ContainerDied","Data":"68343fc1dbff44a02b7d2fe290c3a8d3ec499c0b1130f91e2d3fff254d7ee07e"} Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.103998 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68343fc1dbff44a02b7d2fe290c3a8d3ec499c0b1130f91e2d3fff254d7ee07e" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.104067 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-wqg7s" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.125214 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-mlkhj" event={"ID":"86c35465-1240-412c-9182-99d8ed10f948","Type":"ContainerDied","Data":"3046734acd569772574e3465386d2519f3b5b95103f288b3ac7464697b25fcec"} Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.125258 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3046734acd569772574e3465386d2519f3b5b95103f288b3ac7464697b25fcec" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.125376 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-mlkhj" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.237623 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-75cb4c4d5b-gg8r8"] Dec 05 05:44:43 crc kubenswrapper[4652]: E1205 05:44:43.238106 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3415917b-2730-494b-b474-b1bd9c1d08f5" containerName="barbican-db-sync" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.238125 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3415917b-2730-494b-b474-b1bd9c1d08f5" containerName="barbican-db-sync" Dec 05 05:44:43 crc kubenswrapper[4652]: E1205 05:44:43.238172 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86c35465-1240-412c-9182-99d8ed10f948" containerName="placement-db-sync" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.238181 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="86c35465-1240-412c-9182-99d8ed10f948" containerName="placement-db-sync" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.238374 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="3415917b-2730-494b-b474-b1bd9c1d08f5" containerName="barbican-db-sync" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.238392 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="86c35465-1240-412c-9182-99d8ed10f948" containerName="placement-db-sync" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.239544 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.243973 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.244321 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-plpwr" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.244451 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.244591 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.244711 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.258892 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75cb4c4d5b-gg8r8"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.390596 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-9d75f794c-grj2p"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.391949 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.393763 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.394537 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.394805 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-jrz9t" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.406771 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5f5b548c68-xzqkg"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.407996 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.421475 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.428786 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-9d75f794c-grj2p"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.431611 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmftq\" (UniqueName: \"kubernetes.io/projected/94817a12-145d-4719-b525-0905f7fdf28c-kube-api-access-wmftq\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.431717 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-internal-tls-certs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.431747 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-public-tls-certs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.431765 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94817a12-145d-4719-b525-0905f7fdf28c-logs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.431787 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-combined-ca-bundle\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.431831 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-scripts\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.431853 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-config-data\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.440771 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5f5b548c68-xzqkg"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.495674 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7648c6b969-zswpk"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.495910 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" podUID="26ad9bc5-b4f1-476b-8d50-d44153670d74" containerName="dnsmasq-dns" containerID="cri-o://98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8" gracePeriod=10 Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533669 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-config-data-custom\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533710 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-internal-tls-certs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533729 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-config-data\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533753 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-public-tls-certs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533770 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94817a12-145d-4719-b525-0905f7fdf28c-logs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533793 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-combined-ca-bundle\") pod 
\"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533810 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdpsn\" (UniqueName: \"kubernetes.io/projected/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-kube-api-access-bdpsn\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533835 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgwgw\" (UniqueName: \"kubernetes.io/projected/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-kube-api-access-cgwgw\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533873 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-scripts\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533886 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-combined-ca-bundle\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533907 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-config-data\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533930 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-config-data\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.533976 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-config-data-custom\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.534004 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-logs\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.534027 4652 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-wmftq\" (UniqueName: \"kubernetes.io/projected/94817a12-145d-4719-b525-0905f7fdf28c-kube-api-access-wmftq\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.534047 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-combined-ca-bundle\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.534158 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-logs\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.544469 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-internal-tls-certs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.545350 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94817a12-145d-4719-b525-0905f7fdf28c-logs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.547625 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-config-data\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.547839 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-public-tls-certs\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.558028 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-combined-ca-bundle\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.560400 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94817a12-145d-4719-b525-0905f7fdf28c-scripts\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.563594 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmftq\" (UniqueName: 
\"kubernetes.io/projected/94817a12-145d-4719-b525-0905f7fdf28c-kube-api-access-wmftq\") pod \"placement-75cb4c4d5b-gg8r8\" (UID: \"94817a12-145d-4719-b525-0905f7fdf28c\") " pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.569953 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.573630 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764bcc8bff-9jvjn"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.602018 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.630233 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764bcc8bff-9jvjn"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661525 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-logs\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661617 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-svc\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661664 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-combined-ca-bundle\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661708 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-swift-storage-0\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661740 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-sb\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661786 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-logs\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661846 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-config-data-custom\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661875 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-config-data\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661929 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdpsn\" (UniqueName: \"kubernetes.io/projected/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-kube-api-access-bdpsn\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.661971 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgwgw\" (UniqueName: \"kubernetes.io/projected/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-kube-api-access-cgwgw\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.662048 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-config\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.662082 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-combined-ca-bundle\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.662133 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qls5r\" (UniqueName: \"kubernetes.io/projected/ef71f602-26d2-4185-a82a-8d7906d81786-kube-api-access-qls5r\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.662161 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-config-data\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.662256 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-nb\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.662275 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-config-data-custom\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.669200 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-logs\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.670982 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-config-data-custom\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.672944 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-logs\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.680738 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-combined-ca-bundle\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.681596 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-config-data\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.681837 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-config-data\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.682097 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-combined-ca-bundle\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.683946 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgwgw\" (UniqueName: \"kubernetes.io/projected/3acc1f50-b762-48ef-a1ee-dae3fa8bfe42-kube-api-access-cgwgw\") pod \"barbican-worker-9d75f794c-grj2p\" (UID: \"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42\") " pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 
05:44:43.685361 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-config-data-custom\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.693405 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdpsn\" (UniqueName: \"kubernetes.io/projected/7ff94817-36ec-4a5d-957e-ca4ccf1c3982-kube-api-access-bdpsn\") pod \"barbican-keystone-listener-5f5b548c68-xzqkg\" (UID: \"7ff94817-36ec-4a5d-957e-ca4ccf1c3982\") " pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.708328 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-765b8b7694-qt9ss"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.715278 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.725637 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-765b8b7694-qt9ss"] Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.728834 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.740027 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-9d75f794c-grj2p" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.763803 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data-custom\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.763868 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.763900 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/296c2b64-bd68-4e42-beb4-d49560b48ebe-logs\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.763930 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-config\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.763948 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5bxg\" (UniqueName: \"kubernetes.io/projected/296c2b64-bd68-4e42-beb4-d49560b48ebe-kube-api-access-v5bxg\") pod 
\"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.763981 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qls5r\" (UniqueName: \"kubernetes.io/projected/ef71f602-26d2-4185-a82a-8d7906d81786-kube-api-access-qls5r\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.764024 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-combined-ca-bundle\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.764039 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-nb\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.764072 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-svc\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.764100 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-swift-storage-0\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.764117 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-sb\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.765177 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-sb\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.765690 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-config\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.766414 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-nb\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: 
\"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.766574 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-svc\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.767468 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-swift-storage-0\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.786197 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qls5r\" (UniqueName: \"kubernetes.io/projected/ef71f602-26d2-4185-a82a-8d7906d81786-kube-api-access-qls5r\") pod \"dnsmasq-dns-764bcc8bff-9jvjn\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.823580 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.843431 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.865527 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data-custom\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.865627 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.865661 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/296c2b64-bd68-4e42-beb4-d49560b48ebe-logs\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.865705 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5bxg\" (UniqueName: \"kubernetes.io/projected/296c2b64-bd68-4e42-beb4-d49560b48ebe-kube-api-access-v5bxg\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.866770 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/296c2b64-bd68-4e42-beb4-d49560b48ebe-logs\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 
05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.866992 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.874122 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data-custom\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.880270 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-combined-ca-bundle\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.890351 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-combined-ca-bundle\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.896434 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.914999 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5bxg\" (UniqueName: \"kubernetes.io/projected/296c2b64-bd68-4e42-beb4-d49560b48ebe-kube-api-access-v5bxg\") pod \"barbican-api-765b8b7694-qt9ss\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.985189 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-credential-keys\") pod \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.985245 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-config-data\") pod \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.985290 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsqxv\" (UniqueName: \"kubernetes.io/projected/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-kube-api-access-bsqxv\") pod \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.985415 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-fernet-keys\") pod \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " Dec 05 05:44:43 crc 
kubenswrapper[4652]: I1205 05:44:43.985444 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-combined-ca-bundle\") pod \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " Dec 05 05:44:43 crc kubenswrapper[4652]: I1205 05:44:43.985486 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-scripts\") pod \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\" (UID: \"5c70e1c4-c49c-4cb6-adec-0173ebe53d17\") " Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.004007 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-scripts" (OuterVolumeSpecName: "scripts") pod "5c70e1c4-c49c-4cb6-adec-0173ebe53d17" (UID: "5c70e1c4-c49c-4cb6-adec-0173ebe53d17"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.004725 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-kube-api-access-bsqxv" (OuterVolumeSpecName: "kube-api-access-bsqxv") pod "5c70e1c4-c49c-4cb6-adec-0173ebe53d17" (UID: "5c70e1c4-c49c-4cb6-adec-0173ebe53d17"). InnerVolumeSpecName "kube-api-access-bsqxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.005094 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5c70e1c4-c49c-4cb6-adec-0173ebe53d17" (UID: "5c70e1c4-c49c-4cb6-adec-0173ebe53d17"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.009670 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5c70e1c4-c49c-4cb6-adec-0173ebe53d17" (UID: "5c70e1c4-c49c-4cb6-adec-0173ebe53d17"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.032068 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c70e1c4-c49c-4cb6-adec-0173ebe53d17" (UID: "5c70e1c4-c49c-4cb6-adec-0173ebe53d17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.066658 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-config-data" (OuterVolumeSpecName: "config-data") pod "5c70e1c4-c49c-4cb6-adec-0173ebe53d17" (UID: "5c70e1c4-c49c-4cb6-adec-0173ebe53d17"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.087744 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsqxv\" (UniqueName: \"kubernetes.io/projected/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-kube-api-access-bsqxv\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.087775 4652 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.087786 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.087796 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.087804 4652 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.087811 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c70e1c4-c49c-4cb6-adec-0173ebe53d17-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.150781 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.194906 4652 generic.go:334] "Generic (PLEG): container finished" podID="26ad9bc5-b4f1-476b-8d50-d44153670d74" containerID="98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8" exitCode=0 Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.194985 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" event={"ID":"26ad9bc5-b4f1-476b-8d50-d44153670d74","Type":"ContainerDied","Data":"98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8"} Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.195011 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" event={"ID":"26ad9bc5-b4f1-476b-8d50-d44153670d74","Type":"ContainerDied","Data":"fe486f6d3e9d0d799bc946ff1d4ff121ef3fdada650f994be0c2d4f09e9f444a"} Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.195027 4652 scope.go:117] "RemoveContainer" containerID="98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.195139 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7648c6b969-zswpk" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.204541 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c7bb6b57-bpms4"] Dec 05 05:44:44 crc kubenswrapper[4652]: E1205 05:44:44.204953 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26ad9bc5-b4f1-476b-8d50-d44153670d74" containerName="dnsmasq-dns" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.204971 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="26ad9bc5-b4f1-476b-8d50-d44153670d74" containerName="dnsmasq-dns" Dec 05 05:44:44 crc kubenswrapper[4652]: E1205 05:44:44.204984 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c70e1c4-c49c-4cb6-adec-0173ebe53d17" containerName="keystone-bootstrap" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.204990 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c70e1c4-c49c-4cb6-adec-0173ebe53d17" containerName="keystone-bootstrap" Dec 05 05:44:44 crc kubenswrapper[4652]: E1205 05:44:44.205001 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26ad9bc5-b4f1-476b-8d50-d44153670d74" containerName="init" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.205006 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="26ad9bc5-b4f1-476b-8d50-d44153670d74" containerName="init" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.205172 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="26ad9bc5-b4f1-476b-8d50-d44153670d74" containerName="dnsmasq-dns" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.205197 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c70e1c4-c49c-4cb6-adec-0173ebe53d17" containerName="keystone-bootstrap" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.205808 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.210627 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.210797 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.211359 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nmg5q" event={"ID":"a1e961f4-2398-4a5e-a424-e8066a6a7c78","Type":"ContainerStarted","Data":"5197c73104627b9852dd8720560d7a36be572929880f88a4c8e9d9cb6d23ee93"} Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.213228 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.215021 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c7bb6b57-bpms4"] Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.225003 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ghfhc" event={"ID":"5c70e1c4-c49c-4cb6-adec-0173ebe53d17","Type":"ContainerDied","Data":"ab21b431b0f732e9363a20d1860566b6725d42db6213fa3db27a0097e2086451"} Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.225034 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab21b431b0f732e9363a20d1860566b6725d42db6213fa3db27a0097e2086451" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.225080 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ghfhc" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.260264 4652 scope.go:117] "RemoveContainer" containerID="e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.261882 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-nmg5q" podStartSLOduration=2.9195912269999997 podStartE2EDuration="45.261799277s" podCreationTimestamp="2025-12-05 05:43:59 +0000 UTC" firstStartedPulling="2025-12-05 05:44:00.489477099 +0000 UTC m=+1042.726207366" lastFinishedPulling="2025-12-05 05:44:42.831685149 +0000 UTC m=+1085.068415416" observedRunningTime="2025-12-05 05:44:44.250062048 +0000 UTC m=+1086.486792316" watchObservedRunningTime="2025-12-05 05:44:44.261799277 +0000 UTC m=+1086.498529544" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.291292 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-sb\") pod \"26ad9bc5-b4f1-476b-8d50-d44153670d74\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.291383 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-swift-storage-0\") pod \"26ad9bc5-b4f1-476b-8d50-d44153670d74\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.291570 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-config\") pod \"26ad9bc5-b4f1-476b-8d50-d44153670d74\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.291646 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-nb\") pod \"26ad9bc5-b4f1-476b-8d50-d44153670d74\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.291678 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgrvw\" (UniqueName: \"kubernetes.io/projected/26ad9bc5-b4f1-476b-8d50-d44153670d74-kube-api-access-fgrvw\") pod \"26ad9bc5-b4f1-476b-8d50-d44153670d74\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " Dec 05 05:44:44 crc 
kubenswrapper[4652]: I1205 05:44:44.291735 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-svc\") pod \"26ad9bc5-b4f1-476b-8d50-d44153670d74\" (UID: \"26ad9bc5-b4f1-476b-8d50-d44153670d74\") " Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.333702 4652 scope.go:117] "RemoveContainer" containerID="98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8" Dec 05 05:44:44 crc kubenswrapper[4652]: E1205 05:44:44.335299 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8\": container with ID starting with 98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8 not found: ID does not exist" containerID="98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.335338 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8"} err="failed to get container status \"98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8\": rpc error: code = NotFound desc = could not find container \"98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8\": container with ID starting with 98b823f55708f2469ad09c65ad7c5d184678cdc9b567e7d2b08a8dcfca02a7a8 not found: ID does not exist" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.335360 4652 scope.go:117] "RemoveContainer" containerID="e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511" Dec 05 05:44:44 crc kubenswrapper[4652]: E1205 05:44:44.336651 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511\": container with ID starting with e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511 not found: ID does not exist" containerID="e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.336674 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511"} err="failed to get container status \"e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511\": rpc error: code = NotFound desc = could not find container \"e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511\": container with ID starting with e92957f31f7f8633e2821a889eebc6bec6d33b006858bb1e9580c800e0201511 not found: ID does not exist" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.342692 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26ad9bc5-b4f1-476b-8d50-d44153670d74-kube-api-access-fgrvw" (OuterVolumeSpecName: "kube-api-access-fgrvw") pod "26ad9bc5-b4f1-476b-8d50-d44153670d74" (UID: "26ad9bc5-b4f1-476b-8d50-d44153670d74"). InnerVolumeSpecName "kube-api-access-fgrvw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.362281 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-9d75f794c-grj2p"] Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.383314 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75cb4c4d5b-gg8r8"] Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.394228 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-fernet-keys\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.394273 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-config-data\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.394296 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-combined-ca-bundle\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.394402 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-public-tls-certs\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.394418 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-internal-tls-certs\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.394458 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-scripts\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.394487 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kxvn\" (UniqueName: \"kubernetes.io/projected/38331fd0-fb06-4672-801b-f9aae8415645-kube-api-access-8kxvn\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.395503 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-credential-keys\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " 
pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.395613 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgrvw\" (UniqueName: \"kubernetes.io/projected/26ad9bc5-b4f1-476b-8d50-d44153670d74-kube-api-access-fgrvw\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.497193 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "26ad9bc5-b4f1-476b-8d50-d44153670d74" (UID: "26ad9bc5-b4f1-476b-8d50-d44153670d74"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.514195 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5f5b548c68-xzqkg"] Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.514146 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-public-tls-certs\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.521777 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-internal-tls-certs\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.521839 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-scripts\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.521879 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kxvn\" (UniqueName: \"kubernetes.io/projected/38331fd0-fb06-4672-801b-f9aae8415645-kube-api-access-8kxvn\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.521985 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-credential-keys\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.522123 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-fernet-keys\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.522153 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-config-data\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " 
pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.522171 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-combined-ca-bundle\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.564134 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.572946 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-fernet-keys\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.582064 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-scripts\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.592119 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-config-data\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.600580 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-combined-ca-bundle\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.611302 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-credential-keys\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.618006 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-internal-tls-certs\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.631476 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38331fd0-fb06-4672-801b-f9aae8415645-public-tls-certs\") pod \"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.635481 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kxvn\" (UniqueName: \"kubernetes.io/projected/38331fd0-fb06-4672-801b-f9aae8415645-kube-api-access-8kxvn\") pod 
\"keystone-c7bb6b57-bpms4\" (UID: \"38331fd0-fb06-4672-801b-f9aae8415645\") " pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.666294 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "26ad9bc5-b4f1-476b-8d50-d44153670d74" (UID: "26ad9bc5-b4f1-476b-8d50-d44153670d74"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.695084 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "26ad9bc5-b4f1-476b-8d50-d44153670d74" (UID: "26ad9bc5-b4f1-476b-8d50-d44153670d74"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.700064 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-config" (OuterVolumeSpecName: "config") pod "26ad9bc5-b4f1-476b-8d50-d44153670d74" (UID: "26ad9bc5-b4f1-476b-8d50-d44153670d74"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.716831 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "26ad9bc5-b4f1-476b-8d50-d44153670d74" (UID: "26ad9bc5-b4f1-476b-8d50-d44153670d74"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.717004 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.724759 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764bcc8bff-9jvjn"] Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.771926 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.771994 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.772012 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.772032 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/26ad9bc5-b4f1-476b-8d50-d44153670d74-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.794110 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.822906 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-765b8b7694-qt9ss"] Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.847636 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.877670 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7648c6b969-zswpk"] Dec 05 05:44:44 crc kubenswrapper[4652]: I1205 05:44:44.907271 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7648c6b969-zswpk"] Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.077823 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.118354 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.265473 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75cb4c4d5b-gg8r8" event={"ID":"94817a12-145d-4719-b525-0905f7fdf28c","Type":"ContainerStarted","Data":"705e4dfb3da03bce9139d71ef9cf3dc309a42067d76d4b21f4e9be99277c8a78"} Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.265514 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75cb4c4d5b-gg8r8" event={"ID":"94817a12-145d-4719-b525-0905f7fdf28c","Type":"ContainerStarted","Data":"0633e73c8cf099f734ddb3f0e9b6e65db8b0af4b1b687b0f6ed7fc28839fcdf8"} Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.269173 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-765b8b7694-qt9ss" event={"ID":"296c2b64-bd68-4e42-beb4-d49560b48ebe","Type":"ContainerStarted","Data":"324dbafb6cdae2d5b3ccec1a860965aa7c4487fe87401e3ead14d3ce67da7dea"} Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.269199 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-765b8b7694-qt9ss" event={"ID":"296c2b64-bd68-4e42-beb4-d49560b48ebe","Type":"ContainerStarted","Data":"b8e9101a563fa250c08a06d0d54bf3fa824a898b4cec1cdc8579438ef63a744d"} Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.274459 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" event={"ID":"7ff94817-36ec-4a5d-957e-ca4ccf1c3982","Type":"ContainerStarted","Data":"c38fb71a286a56f73f253e2a0aa98bc7487eaeeaced8698b40d3768fb05e349e"} Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.280663 4652 generic.go:334] "Generic (PLEG): container finished" podID="ef71f602-26d2-4185-a82a-8d7906d81786" containerID="4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626" exitCode=0 Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.280705 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" event={"ID":"ef71f602-26d2-4185-a82a-8d7906d81786","Type":"ContainerDied","Data":"4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626"} Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.280721 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" event={"ID":"ef71f602-26d2-4185-a82a-8d7906d81786","Type":"ContainerStarted","Data":"4b4c5687135b9759d4638d565d6b634efb2fffb8c8fa9a4fd9ca0294d3682dfd"} Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.287099 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-9d75f794c-grj2p" event={"ID":"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42","Type":"ContainerStarted","Data":"52d3a927191e0e8f8442126d31523a0c4ed920327be233ea6f56ba21d10b8319"} Dec 05 05:44:45 crc 
kubenswrapper[4652]: I1205 05:44:45.289515 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.346090 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.350014 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Dec 05 05:44:45 crc kubenswrapper[4652]: I1205 05:44:45.396624 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c7bb6b57-bpms4"] Dec 05 05:44:45 crc kubenswrapper[4652]: W1205 05:44:45.420748 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38331fd0_fb06_4672_801b_f9aae8415645.slice/crio-470a0226db360b7fd8400cdbf6987b59448e6f2dd8174d86b84c4c8e0d66b144 WatchSource:0}: Error finding container 470a0226db360b7fd8400cdbf6987b59448e6f2dd8174d86b84c4c8e0d66b144: Status 404 returned error can't find the container with id 470a0226db360b7fd8400cdbf6987b59448e6f2dd8174d86b84c4c8e0d66b144 Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.152704 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26ad9bc5-b4f1-476b-8d50-d44153670d74" path="/var/lib/kubelet/pods/26ad9bc5-b4f1-476b-8d50-d44153670d74/volumes" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.297711 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c7bb6b57-bpms4" event={"ID":"38331fd0-fb06-4672-801b-f9aae8415645","Type":"ContainerStarted","Data":"470a0226db360b7fd8400cdbf6987b59448e6f2dd8174d86b84c4c8e0d66b144"} Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.301483 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75cb4c4d5b-gg8r8" event={"ID":"94817a12-145d-4719-b525-0905f7fdf28c","Type":"ContainerStarted","Data":"899929a23f7cc645185488ac6d78394964c540cf4f280a80e7dca2d5db29967c"} Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.302634 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.302697 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.308295 4652 generic.go:334] "Generic (PLEG): container finished" podID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerID="45845c4c4d40fa1001367e8f6d74e8f4fc8cddaa1c3feae8f78472a5ef8c3442" exitCode=1 Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.308346 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerDied","Data":"45845c4c4d40fa1001367e8f6d74e8f4fc8cddaa1c3feae8f78472a5ef8c3442"} Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.308367 4652 scope.go:117] "RemoveContainer" containerID="eabaaa268d144d039a172c70b9f7f90078e7b3ba8da2dd7855954b59161d2790" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.309006 4652 scope.go:117] "RemoveContainer" containerID="45845c4c4d40fa1001367e8f6d74e8f4fc8cddaa1c3feae8f78472a5ef8c3442" Dec 05 05:44:46 crc kubenswrapper[4652]: E1205 05:44:46.309214 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with 
CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(ca914a04-6a6f-4b20-af32-e0771a7dffa5)\"" pod="openstack/watcher-decision-engine-0" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.312640 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-765b8b7694-qt9ss" event={"ID":"296c2b64-bd68-4e42-beb4-d49560b48ebe","Type":"ContainerStarted","Data":"506138841ccf32d2034706204a7adbe83bd5aeec862d9a34cbcd6c5c18716129"} Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.312666 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.312688 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.317156 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-75cb4c4d5b-gg8r8" podStartSLOduration=3.317147645 podStartE2EDuration="3.317147645s" podCreationTimestamp="2025-12-05 05:44:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:46.315812266 +0000 UTC m=+1088.552542533" watchObservedRunningTime="2025-12-05 05:44:46.317147645 +0000 UTC m=+1088.553877911" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.358325 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-765b8b7694-qt9ss" podStartSLOduration=3.35831226 podStartE2EDuration="3.35831226s" podCreationTimestamp="2025-12-05 05:44:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:46.350171347 +0000 UTC m=+1088.586901614" watchObservedRunningTime="2025-12-05 05:44:46.35831226 +0000 UTC m=+1088.595042527" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.527401 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6998b65996-4b5mf"] Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.529135 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.531914 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.533589 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.554048 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6998b65996-4b5mf"] Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.635505 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-combined-ca-bundle\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.635619 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8afcb8d2-22df-47a6-991c-c39a75b6834f-logs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.635650 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-config-data\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.635671 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms6f4\" (UniqueName: \"kubernetes.io/projected/8afcb8d2-22df-47a6-991c-c39a75b6834f-kube-api-access-ms6f4\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.635704 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-public-tls-certs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.635765 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-config-data-custom\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.635833 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-internal-tls-certs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.737199 4652 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8afcb8d2-22df-47a6-991c-c39a75b6834f-logs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.737241 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-config-data\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.737261 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms6f4\" (UniqueName: \"kubernetes.io/projected/8afcb8d2-22df-47a6-991c-c39a75b6834f-kube-api-access-ms6f4\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.737291 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-public-tls-certs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.737338 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-config-data-custom\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.737373 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-internal-tls-certs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.737412 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-combined-ca-bundle\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.738222 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8afcb8d2-22df-47a6-991c-c39a75b6834f-logs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.742270 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-public-tls-certs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.743042 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-config-data\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.751423 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-combined-ca-bundle\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.751678 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-internal-tls-certs\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.753710 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8afcb8d2-22df-47a6-991c-c39a75b6834f-config-data-custom\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.759848 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ms6f4\" (UniqueName: \"kubernetes.io/projected/8afcb8d2-22df-47a6-991c-c39a75b6834f-kube-api-access-ms6f4\") pod \"barbican-api-6998b65996-4b5mf\" (UID: \"8afcb8d2-22df-47a6-991c-c39a75b6834f\") " pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.848899 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:46 crc kubenswrapper[4652]: I1205 05:44:46.935092 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Dec 05 05:44:47 crc kubenswrapper[4652]: I1205 05:44:47.044906 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Dec 05 05:44:47 crc kubenswrapper[4652]: I1205 05:44:47.319709 4652 scope.go:117] "RemoveContainer" containerID="45845c4c4d40fa1001367e8f6d74e8f4fc8cddaa1c3feae8f78472a5ef8c3442" Dec 05 05:44:47 crc kubenswrapper[4652]: E1205 05:44:47.320101 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(ca914a04-6a6f-4b20-af32-e0771a7dffa5)\"" pod="openstack/watcher-decision-engine-0" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" Dec 05 05:44:47 crc kubenswrapper[4652]: I1205 05:44:47.335609 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Dec 05 05:44:49 crc kubenswrapper[4652]: I1205 05:44:49.369114 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" event={"ID":"ef71f602-26d2-4185-a82a-8d7906d81786","Type":"ContainerStarted","Data":"3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58"} Dec 05 05:44:49 crc kubenswrapper[4652]: I1205 05:44:49.369660 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:49 crc kubenswrapper[4652]: I1205 05:44:49.391593 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" podStartSLOduration=6.391576527 podStartE2EDuration="6.391576527s" podCreationTimestamp="2025-12-05 05:44:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:49.385678187 +0000 UTC m=+1091.622408455" watchObservedRunningTime="2025-12-05 05:44:49.391576527 +0000 UTC m=+1091.628306793" Dec 05 05:44:50 crc kubenswrapper[4652]: I1205 05:44:50.021735 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7744f5c9d6-t75mf" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.159:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.159:8443: connect: connection refused" Dec 05 05:44:50 crc kubenswrapper[4652]: I1205 05:44:50.283278 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c77c7b944-twjsn" podUID="a1465128-fcb6-49f8-8879-96e87d51b967" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.160:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.160:8443: connect: connection refused" Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.382849 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" event={"ID":"7ff94817-36ec-4a5d-957e-ca4ccf1c3982","Type":"ContainerStarted","Data":"4218fef4f9e64bd53d7e9394f146708f7b715a298f62f71e424f0e33e8fea3fa"} Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.384830 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-9d75f794c-grj2p" 
event={"ID":"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42","Type":"ContainerStarted","Data":"5ce34f129544792bad3506c1db970abde20ac1a0dc0abe470c04067f98413454"} Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.386344 4652 generic.go:334] "Generic (PLEG): container finished" podID="a1e961f4-2398-4a5e-a424-e8066a6a7c78" containerID="5197c73104627b9852dd8720560d7a36be572929880f88a4c8e9d9cb6d23ee93" exitCode=0 Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.386391 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nmg5q" event={"ID":"a1e961f4-2398-4a5e-a424-e8066a6a7c78","Type":"ContainerDied","Data":"5197c73104627b9852dd8720560d7a36be572929880f88a4c8e9d9cb6d23ee93"} Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.388828 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c7bb6b57-bpms4" event={"ID":"38331fd0-fb06-4672-801b-f9aae8415645","Type":"ContainerStarted","Data":"a3cb015f656d2379708476781d3d1f00493f14de8c57de6b44ca0efd922c942f"} Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.388930 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.390694 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerStarted","Data":"dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf"} Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.417745 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-c7bb6b57-bpms4" podStartSLOduration=7.417730133 podStartE2EDuration="7.417730133s" podCreationTimestamp="2025-12-05 05:44:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:51.412700415 +0000 UTC m=+1093.649430682" watchObservedRunningTime="2025-12-05 05:44:51.417730133 +0000 UTC m=+1093.654460400" Dec 05 05:44:51 crc kubenswrapper[4652]: W1205 05:44:51.478663 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8afcb8d2_22df_47a6_991c_c39a75b6834f.slice/crio-20c4c37005ba88766b9fdf47a56071fec9e4cd15e2ad883dc3178742b541fe50 WatchSource:0}: Error finding container 20c4c37005ba88766b9fdf47a56071fec9e4cd15e2ad883dc3178742b541fe50: Status 404 returned error can't find the container with id 20c4c37005ba88766b9fdf47a56071fec9e4cd15e2ad883dc3178742b541fe50 Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.479227 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6998b65996-4b5mf"] Dec 05 05:44:51 crc kubenswrapper[4652]: I1205 05:44:51.636681 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-765b8b7694-qt9ss" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.402766 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" event={"ID":"7ff94817-36ec-4a5d-957e-ca4ccf1c3982","Type":"ContainerStarted","Data":"9d109b09044a7f7f57f187d2935369172970eed96f388762b8c9baeb97861124"} Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.407739 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-6998b65996-4b5mf" event={"ID":"8afcb8d2-22df-47a6-991c-c39a75b6834f","Type":"ContainerStarted","Data":"67d69d246f6b0d71354275ba33e972ee7d60e42e2e9399bc75f1520128331d74"} Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.407774 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6998b65996-4b5mf" event={"ID":"8afcb8d2-22df-47a6-991c-c39a75b6834f","Type":"ContainerStarted","Data":"701c291f1cd5c214e4f491be48f58120f01485688ef060f0081cc805c2b32656"} Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.407785 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6998b65996-4b5mf" event={"ID":"8afcb8d2-22df-47a6-991c-c39a75b6834f","Type":"ContainerStarted","Data":"20c4c37005ba88766b9fdf47a56071fec9e4cd15e2ad883dc3178742b541fe50"} Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.408368 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.408394 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.409924 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-9d75f794c-grj2p" event={"ID":"3acc1f50-b762-48ef-a1ee-dae3fa8bfe42","Type":"ContainerStarted","Data":"eab2e8464b3228d5946d987cea4d76a227805bc087f84c7c8ba42f1d84b8c1ee"} Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.411775 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xcf4m" event={"ID":"266e7065-7af6-4547-b7bb-5e981e095969","Type":"ContainerStarted","Data":"1b4b8a26b5fb42022c7f44aee8c181036979d5fd24204b20d1e46afda9363848"} Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.423842 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5f5b548c68-xzqkg" podStartSLOduration=2.927140822 podStartE2EDuration="9.423828022s" podCreationTimestamp="2025-12-05 05:44:43 +0000 UTC" firstStartedPulling="2025-12-05 05:44:44.584935095 +0000 UTC m=+1086.821665363" lastFinishedPulling="2025-12-05 05:44:51.081622297 +0000 UTC m=+1093.318352563" observedRunningTime="2025-12-05 05:44:52.416280152 +0000 UTC m=+1094.653010419" watchObservedRunningTime="2025-12-05 05:44:52.423828022 +0000 UTC m=+1094.660558288" Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.463690 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-9d75f794c-grj2p" podStartSLOduration=2.739596126 podStartE2EDuration="9.463674953s" podCreationTimestamp="2025-12-05 05:44:43 +0000 UTC" firstStartedPulling="2025-12-05 05:44:44.358050392 +0000 UTC m=+1086.594780658" lastFinishedPulling="2025-12-05 05:44:51.082129218 +0000 UTC m=+1093.318859485" observedRunningTime="2025-12-05 05:44:52.461622566 +0000 UTC m=+1094.698352833" watchObservedRunningTime="2025-12-05 05:44:52.463674953 +0000 UTC m=+1094.700405219" Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.464303 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6998b65996-4b5mf" podStartSLOduration=6.46429718 podStartE2EDuration="6.46429718s" podCreationTimestamp="2025-12-05 05:44:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:52.444671344 +0000 UTC 
m=+1094.681401611" watchObservedRunningTime="2025-12-05 05:44:52.46429718 +0000 UTC m=+1094.701027448" Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.734287 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.777233 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-xcf4m" podStartSLOduration=2.1350510209999998 podStartE2EDuration="52.777211459s" podCreationTimestamp="2025-12-05 05:44:00 +0000 UTC" firstStartedPulling="2025-12-05 05:44:01.158017409 +0000 UTC m=+1043.394747676" lastFinishedPulling="2025-12-05 05:44:51.800177847 +0000 UTC m=+1094.036908114" observedRunningTime="2025-12-05 05:44:52.477489041 +0000 UTC m=+1094.714219308" watchObservedRunningTime="2025-12-05 05:44:52.777211459 +0000 UTC m=+1095.013941726" Dec 05 05:44:52 crc kubenswrapper[4652]: I1205 05:44:52.946932 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-nmg5q" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.091847 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-combined-ca-bundle\") pod \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.092172 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-db-sync-config-data\") pod \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.092218 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-config-data\") pod \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.092272 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg7pc\" (UniqueName: \"kubernetes.io/projected/a1e961f4-2398-4a5e-a424-e8066a6a7c78-kube-api-access-sg7pc\") pod \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\" (UID: \"a1e961f4-2398-4a5e-a424-e8066a6a7c78\") " Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.098770 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a1e961f4-2398-4a5e-a424-e8066a6a7c78" (UID: "a1e961f4-2398-4a5e-a424-e8066a6a7c78"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.100612 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1e961f4-2398-4a5e-a424-e8066a6a7c78-kube-api-access-sg7pc" (OuterVolumeSpecName: "kube-api-access-sg7pc") pod "a1e961f4-2398-4a5e-a424-e8066a6a7c78" (UID: "a1e961f4-2398-4a5e-a424-e8066a6a7c78"). InnerVolumeSpecName "kube-api-access-sg7pc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.124506 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1e961f4-2398-4a5e-a424-e8066a6a7c78" (UID: "a1e961f4-2398-4a5e-a424-e8066a6a7c78"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.143938 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-config-data" (OuterVolumeSpecName: "config-data") pod "a1e961f4-2398-4a5e-a424-e8066a6a7c78" (UID: "a1e961f4-2398-4a5e-a424-e8066a6a7c78"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.194441 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg7pc\" (UniqueName: \"kubernetes.io/projected/a1e961f4-2398-4a5e-a424-e8066a6a7c78-kube-api-access-sg7pc\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.194473 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.194484 4652 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.194570 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1e961f4-2398-4a5e-a424-e8066a6a7c78-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.443661 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-nmg5q" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.443667 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nmg5q" event={"ID":"a1e961f4-2398-4a5e-a424-e8066a6a7c78","Type":"ContainerDied","Data":"dad0187ac78b926326e87fa4f7dafbed4c4ef356fdf8d470ed1c7b2dc7c1ad95"} Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.443805 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dad0187ac78b926326e87fa4f7dafbed4c4ef356fdf8d470ed1c7b2dc7c1ad95" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.670857 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764bcc8bff-9jvjn"] Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.671066 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" podUID="ef71f602-26d2-4185-a82a-8d7906d81786" containerName="dnsmasq-dns" containerID="cri-o://3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58" gracePeriod=10 Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.723135 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-wnqsq"] Dec 05 05:44:53 crc kubenswrapper[4652]: E1205 05:44:53.723590 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1e961f4-2398-4a5e-a424-e8066a6a7c78" containerName="glance-db-sync" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.723607 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1e961f4-2398-4a5e-a424-e8066a6a7c78" containerName="glance-db-sync" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.723782 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1e961f4-2398-4a5e-a424-e8066a6a7c78" containerName="glance-db-sync" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.724728 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.741823 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-wnqsq"] Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.912684 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-sb\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.912733 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-swift-storage-0\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.912755 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8w57c\" (UniqueName: \"kubernetes.io/projected/a00899df-e699-4c80-96e4-761d674a03a0-kube-api-access-8w57c\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.912801 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-svc\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.912818 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-config\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:53 crc kubenswrapper[4652]: I1205 05:44:53.912842 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-nb\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.014844 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-svc\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.014893 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-config\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.014922 4652 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-nb\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.015102 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-sb\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.015123 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-swift-storage-0\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.015144 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8w57c\" (UniqueName: \"kubernetes.io/projected/a00899df-e699-4c80-96e4-761d674a03a0-kube-api-access-8w57c\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.015959 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-svc\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.016520 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-config\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.016652 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-nb\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.016888 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-sb\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.017640 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-swift-storage-0\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.032747 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8w57c\" (UniqueName: 
\"kubernetes.io/projected/a00899df-e699-4c80-96e4-761d674a03a0-kube-api-access-8w57c\") pod \"dnsmasq-dns-84c68846bf-wnqsq\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.090363 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.140246 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.219762 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-config\") pod \"ef71f602-26d2-4185-a82a-8d7906d81786\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.220245 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-svc\") pod \"ef71f602-26d2-4185-a82a-8d7906d81786\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.220352 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-nb\") pod \"ef71f602-26d2-4185-a82a-8d7906d81786\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.220403 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qls5r\" (UniqueName: \"kubernetes.io/projected/ef71f602-26d2-4185-a82a-8d7906d81786-kube-api-access-qls5r\") pod \"ef71f602-26d2-4185-a82a-8d7906d81786\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.220585 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-sb\") pod \"ef71f602-26d2-4185-a82a-8d7906d81786\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.227200 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-swift-storage-0\") pod \"ef71f602-26d2-4185-a82a-8d7906d81786\" (UID: \"ef71f602-26d2-4185-a82a-8d7906d81786\") " Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.237701 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef71f602-26d2-4185-a82a-8d7906d81786-kube-api-access-qls5r" (OuterVolumeSpecName: "kube-api-access-qls5r") pod "ef71f602-26d2-4185-a82a-8d7906d81786" (UID: "ef71f602-26d2-4185-a82a-8d7906d81786"). InnerVolumeSpecName "kube-api-access-qls5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.294226 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ef71f602-26d2-4185-a82a-8d7906d81786" (UID: "ef71f602-26d2-4185-a82a-8d7906d81786"). 
InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.306222 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ef71f602-26d2-4185-a82a-8d7906d81786" (UID: "ef71f602-26d2-4185-a82a-8d7906d81786"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.311397 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ef71f602-26d2-4185-a82a-8d7906d81786" (UID: "ef71f602-26d2-4185-a82a-8d7906d81786"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.330370 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qls5r\" (UniqueName: \"kubernetes.io/projected/ef71f602-26d2-4185-a82a-8d7906d81786-kube-api-access-qls5r\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.330395 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.330406 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.330415 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.336392 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ef71f602-26d2-4185-a82a-8d7906d81786" (UID: "ef71f602-26d2-4185-a82a-8d7906d81786"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.343134 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-config" (OuterVolumeSpecName: "config") pod "ef71f602-26d2-4185-a82a-8d7906d81786" (UID: "ef71f602-26d2-4185-a82a-8d7906d81786"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.432548 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.432594 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef71f602-26d2-4185-a82a-8d7906d81786-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.454883 4652 generic.go:334] "Generic (PLEG): container finished" podID="ef71f602-26d2-4185-a82a-8d7906d81786" containerID="3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58" exitCode=0 Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.454940 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.454950 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" event={"ID":"ef71f602-26d2-4185-a82a-8d7906d81786","Type":"ContainerDied","Data":"3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58"} Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.455025 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764bcc8bff-9jvjn" event={"ID":"ef71f602-26d2-4185-a82a-8d7906d81786","Type":"ContainerDied","Data":"4b4c5687135b9759d4638d565d6b634efb2fffb8c8fa9a4fd9ca0294d3682dfd"} Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.455048 4652 scope.go:117] "RemoveContainer" containerID="3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.488413 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764bcc8bff-9jvjn"] Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.495407 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764bcc8bff-9jvjn"] Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.586771 4652 scope.go:117] "RemoveContainer" containerID="4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.611184 4652 scope.go:117] "RemoveContainer" containerID="3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58" Dec 05 05:44:54 crc kubenswrapper[4652]: E1205 05:44:54.611841 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58\": container with ID starting with 3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58 not found: ID does not exist" containerID="3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.611880 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58"} err="failed to get container status \"3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58\": rpc error: code = NotFound desc = could not find container \"3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58\": container with ID starting with 3dc19aeb898e53af60fc757d2e28f8fcf93b228db81952cdf620928f558ece58 not found: ID 
does not exist" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.611904 4652 scope.go:117] "RemoveContainer" containerID="4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626" Dec 05 05:44:54 crc kubenswrapper[4652]: E1205 05:44:54.612671 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626\": container with ID starting with 4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626 not found: ID does not exist" containerID="4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.612699 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626"} err="failed to get container status \"4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626\": rpc error: code = NotFound desc = could not find container \"4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626\": container with ID starting with 4e6695fcdcbfed980e029abab8ff68a3bcc6853bc3b87e326f4021422a73a626 not found: ID does not exist" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.657281 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-wnqsq"] Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.692013 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:44:54 crc kubenswrapper[4652]: E1205 05:44:54.692682 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef71f602-26d2-4185-a82a-8d7906d81786" containerName="dnsmasq-dns" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.692704 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef71f602-26d2-4185-a82a-8d7906d81786" containerName="dnsmasq-dns" Dec 05 05:44:54 crc kubenswrapper[4652]: E1205 05:44:54.692748 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef71f602-26d2-4185-a82a-8d7906d81786" containerName="init" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.692757 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef71f602-26d2-4185-a82a-8d7906d81786" containerName="init" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.693016 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef71f602-26d2-4185-a82a-8d7906d81786" containerName="dnsmasq-dns" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.694517 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.699765 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mxsrw" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.700019 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.700182 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.714861 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.715899 4652 scope.go:117] "RemoveContainer" containerID="45845c4c4d40fa1001367e8f6d74e8f4fc8cddaa1c3feae8f78472a5ef8c3442" Dec 05 05:44:54 crc kubenswrapper[4652]: E1205 05:44:54.716211 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(ca914a04-6a6f-4b20-af32-e0771a7dffa5)\"" pod="openstack/watcher-decision-engine-0" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.717097 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.845656 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.845714 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8prvn\" (UniqueName: \"kubernetes.io/projected/3e5d534b-49ad-4464-a68a-6ea3de404e3b-kube-api-access-8prvn\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.845767 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.845869 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.845906 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-logs\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " 
pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.845927 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-config-data\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.845969 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-scripts\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.858312 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.859714 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.867069 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.869858 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.921591 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-9f8cfd587-c4hb6" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.949488 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.949542 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8prvn\" (UniqueName: \"kubernetes.io/projected/3e5d534b-49ad-4464-a68a-6ea3de404e3b-kube-api-access-8prvn\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.949598 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.949869 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.951632 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.951671 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-logs\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.951692 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-config-data\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.951734 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-scripts\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.952896 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.958214 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-logs\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.968363 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.969512 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-config-data\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.980247 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8prvn\" (UniqueName: \"kubernetes.io/projected/3e5d534b-49ad-4464-a68a-6ea3de404e3b-kube-api-access-8prvn\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.981529 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5f57444c8-9lkq9"] Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.981774 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5f57444c8-9lkq9" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" 
containerName="neutron-api" containerID="cri-o://33d9447d060975f3b4a6598751b46ebaf05b54df8ff59a1f14204e7f2c5ca06e" gracePeriod=30 Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.982393 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5f57444c8-9lkq9" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerName="neutron-httpd" containerID="cri-o://df2d7400d42d396963dfe9abe80aa7a96d39cd8cefed0945dfa7b52a3fba2906" gracePeriod=30 Dec 05 05:44:54 crc kubenswrapper[4652]: I1205 05:44:54.991724 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-scripts\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.005377 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " pod="openstack/glance-default-external-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.018531 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.058189 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-logs\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.058261 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.058324 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.058371 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2bx5\" (UniqueName: \"kubernetes.io/projected/24e38bb8-1345-4058-a9cc-666c3b370ac8-kube-api-access-n2bx5\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.058436 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.058459 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.058476 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.160472 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.160576 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.160630 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2bx5\" (UniqueName: \"kubernetes.io/projected/24e38bb8-1345-4058-a9cc-666c3b370ac8-kube-api-access-n2bx5\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.160647 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.161848 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.161905 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.161933 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.162095 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-logs\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.163614 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-logs\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.166342 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.179466 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.183001 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.194351 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2bx5\" (UniqueName: \"kubernetes.io/projected/24e38bb8-1345-4058-a9cc-666c3b370ac8-kube-api-access-n2bx5\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.200172 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.256818 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.475374 4652 generic.go:334] "Generic (PLEG): container finished" podID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerID="df2d7400d42d396963dfe9abe80aa7a96d39cd8cefed0945dfa7b52a3fba2906" exitCode=0 Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.475418 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f57444c8-9lkq9" event={"ID":"e90f4813-fb7c-4375-9e7d-94673381ffae","Type":"ContainerDied","Data":"df2d7400d42d396963dfe9abe80aa7a96d39cd8cefed0945dfa7b52a3fba2906"} Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.479802 4652 generic.go:334] "Generic (PLEG): container finished" podID="a00899df-e699-4c80-96e4-761d674a03a0" 
containerID="b4d7ed97a15f09164620b5169e49db301c7ab66f735aeb8eb9f11537a352a628" exitCode=0 Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.479840 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" event={"ID":"a00899df-e699-4c80-96e4-761d674a03a0","Type":"ContainerDied","Data":"b4d7ed97a15f09164620b5169e49db301c7ab66f735aeb8eb9f11537a352a628"} Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.479861 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" event={"ID":"a00899df-e699-4c80-96e4-761d674a03a0","Type":"ContainerStarted","Data":"80ab81fb2a8be2fafbf8710547cc7239a162fad5805e810fa2b535bfc974a2b2"} Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.504150 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.617299 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:44:55 crc kubenswrapper[4652]: W1205 05:44:55.632339 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e5d534b_49ad_4464_a68a_6ea3de404e3b.slice/crio-7ac2107cb334455f52066989027692e06e87c8eea36f9b3772bb847ed5a6c859 WatchSource:0}: Error finding container 7ac2107cb334455f52066989027692e06e87c8eea36f9b3772bb847ed5a6c859: Status 404 returned error can't find the container with id 7ac2107cb334455f52066989027692e06e87c8eea36f9b3772bb847ed5a6c859 Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.632468 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:55 crc kubenswrapper[4652]: I1205 05:44:55.867965 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.025277 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.139448 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef71f602-26d2-4185-a82a-8d7906d81786" path="/var/lib/kubelet/pods/ef71f602-26d2-4185-a82a-8d7906d81786/volumes" Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.493796 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.506872 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" event={"ID":"a00899df-e699-4c80-96e4-761d674a03a0","Type":"ContainerStarted","Data":"71f2bca4040e75e236bb2a837785789ebeec3ba1bfe1298bf7266694994092b6"} Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.507274 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.508455 4652 generic.go:334] "Generic (PLEG): container finished" podID="266e7065-7af6-4547-b7bb-5e981e095969" containerID="1b4b8a26b5fb42022c7f44aee8c181036979d5fd24204b20d1e46afda9363848" exitCode=0 Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.508507 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xcf4m" 
event={"ID":"266e7065-7af6-4547-b7bb-5e981e095969","Type":"ContainerDied","Data":"1b4b8a26b5fb42022c7f44aee8c181036979d5fd24204b20d1e46afda9363848"} Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.509724 4652 generic.go:334] "Generic (PLEG): container finished" podID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerID="33d9447d060975f3b4a6598751b46ebaf05b54df8ff59a1f14204e7f2c5ca06e" exitCode=0 Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.509755 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f57444c8-9lkq9" event={"ID":"e90f4813-fb7c-4375-9e7d-94673381ffae","Type":"ContainerDied","Data":"33d9447d060975f3b4a6598751b46ebaf05b54df8ff59a1f14204e7f2c5ca06e"} Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.511025 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3e5d534b-49ad-4464-a68a-6ea3de404e3b","Type":"ContainerStarted","Data":"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837"} Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.511043 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3e5d534b-49ad-4464-a68a-6ea3de404e3b","Type":"ContainerStarted","Data":"7ac2107cb334455f52066989027692e06e87c8eea36f9b3772bb847ed5a6c859"} Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.542879 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" podStartSLOduration=3.5428592610000003 podStartE2EDuration="3.542859261s" podCreationTimestamp="2025-12-05 05:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:44:56.53850388 +0000 UTC m=+1098.775234147" watchObservedRunningTime="2025-12-05 05:44:56.542859261 +0000 UTC m=+1098.779589528" Dec 05 05:44:56 crc kubenswrapper[4652]: I1205 05:44:56.682045 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:44:58 crc kubenswrapper[4652]: I1205 05:44:58.205063 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:58 crc kubenswrapper[4652]: I1205 05:44:58.218332 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6998b65996-4b5mf" Dec 05 05:44:58 crc kubenswrapper[4652]: I1205 05:44:58.300077 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-765b8b7694-qt9ss"] Dec 05 05:44:58 crc kubenswrapper[4652]: I1205 05:44:58.300543 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-765b8b7694-qt9ss" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api-log" containerID="cri-o://324dbafb6cdae2d5b3ccec1a860965aa7c4487fe87401e3ead14d3ce67da7dea" gracePeriod=30 Dec 05 05:44:58 crc kubenswrapper[4652]: I1205 05:44:58.300950 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-765b8b7694-qt9ss" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api" containerID="cri-o://506138841ccf32d2034706204a7adbe83bd5aeec862d9a34cbcd6c5c18716129" gracePeriod=30 Dec 05 05:44:58 crc kubenswrapper[4652]: I1205 05:44:58.537944 4652 generic.go:334] "Generic (PLEG): container finished" podID="296c2b64-bd68-4e42-beb4-d49560b48ebe" 
containerID="324dbafb6cdae2d5b3ccec1a860965aa7c4487fe87401e3ead14d3ce67da7dea" exitCode=143 Dec 05 05:44:58 crc kubenswrapper[4652]: I1205 05:44:58.539200 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-765b8b7694-qt9ss" event={"ID":"296c2b64-bd68-4e42-beb4-d49560b48ebe","Type":"ContainerDied","Data":"324dbafb6cdae2d5b3ccec1a860965aa7c4487fe87401e3ead14d3ce67da7dea"} Dec 05 05:44:59 crc kubenswrapper[4652]: I1205 05:44:59.213835 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-765b8b7694-qt9ss" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.170:9311/healthcheck\": dial tcp 10.217.0.170:9311: connect: connection refused" Dec 05 05:44:59 crc kubenswrapper[4652]: I1205 05:44:59.213874 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-765b8b7694-qt9ss" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.170:9311/healthcheck\": dial tcp 10.217.0.170:9311: connect: connection refused" Dec 05 05:44:59 crc kubenswrapper[4652]: I1205 05:44:59.561853 4652 generic.go:334] "Generic (PLEG): container finished" podID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerID="506138841ccf32d2034706204a7adbe83bd5aeec862d9a34cbcd6c5c18716129" exitCode=0 Dec 05 05:44:59 crc kubenswrapper[4652]: I1205 05:44:59.561926 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-765b8b7694-qt9ss" event={"ID":"296c2b64-bd68-4e42-beb4-d49560b48ebe","Type":"ContainerDied","Data":"506138841ccf32d2034706204a7adbe83bd5aeec862d9a34cbcd6c5c18716129"} Dec 05 05:44:59 crc kubenswrapper[4652]: I1205 05:44:59.977490 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.008357 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-db-sync-config-data\") pod \"266e7065-7af6-4547-b7bb-5e981e095969\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.008394 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-scripts\") pod \"266e7065-7af6-4547-b7bb-5e981e095969\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.008510 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-combined-ca-bundle\") pod \"266e7065-7af6-4547-b7bb-5e981e095969\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.009440 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffkhs\" (UniqueName: \"kubernetes.io/projected/266e7065-7af6-4547-b7bb-5e981e095969-kube-api-access-ffkhs\") pod \"266e7065-7af6-4547-b7bb-5e981e095969\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.009469 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-config-data\") pod \"266e7065-7af6-4547-b7bb-5e981e095969\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.009617 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/266e7065-7af6-4547-b7bb-5e981e095969-etc-machine-id\") pod \"266e7065-7af6-4547-b7bb-5e981e095969\" (UID: \"266e7065-7af6-4547-b7bb-5e981e095969\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.010054 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/266e7065-7af6-4547-b7bb-5e981e095969-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "266e7065-7af6-4547-b7bb-5e981e095969" (UID: "266e7065-7af6-4547-b7bb-5e981e095969"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.010671 4652 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/266e7065-7af6-4547-b7bb-5e981e095969-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.014990 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-scripts" (OuterVolumeSpecName: "scripts") pod "266e7065-7af6-4547-b7bb-5e981e095969" (UID: "266e7065-7af6-4547-b7bb-5e981e095969"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.039700 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/266e7065-7af6-4547-b7bb-5e981e095969-kube-api-access-ffkhs" (OuterVolumeSpecName: "kube-api-access-ffkhs") pod "266e7065-7af6-4547-b7bb-5e981e095969" (UID: "266e7065-7af6-4547-b7bb-5e981e095969"). InnerVolumeSpecName "kube-api-access-ffkhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.039686 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "266e7065-7af6-4547-b7bb-5e981e095969" (UID: "266e7065-7af6-4547-b7bb-5e981e095969"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.051755 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "266e7065-7af6-4547-b7bb-5e981e095969" (UID: "266e7065-7af6-4547-b7bb-5e981e095969"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.072040 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-config-data" (OuterVolumeSpecName: "config-data") pod "266e7065-7af6-4547-b7bb-5e981e095969" (UID: "266e7065-7af6-4547-b7bb-5e981e095969"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.111921 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.112089 4652 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.112101 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.112109 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/266e7065-7af6-4547-b7bb-5e981e095969-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.112119 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffkhs\" (UniqueName: \"kubernetes.io/projected/266e7065-7af6-4547-b7bb-5e981e095969-kube-api-access-ffkhs\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.156271 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94"] Dec 05 05:45:00 crc kubenswrapper[4652]: E1205 05:45:00.156724 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="266e7065-7af6-4547-b7bb-5e981e095969" containerName="cinder-db-sync" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.156743 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="266e7065-7af6-4547-b7bb-5e981e095969" containerName="cinder-db-sync" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.156958 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="266e7065-7af6-4547-b7bb-5e981e095969" containerName="cinder-db-sync" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.157850 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.159485 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.159724 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.160057 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94"] Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.214407 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99af3ed8-4da1-4029-8876-20a311b1762c-config-volume\") pod \"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.214572 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99af3ed8-4da1-4029-8876-20a311b1762c-secret-volume\") pod \"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.214702 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pft7\" (UniqueName: \"kubernetes.io/projected/99af3ed8-4da1-4029-8876-20a311b1762c-kube-api-access-8pft7\") pod \"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.317074 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99af3ed8-4da1-4029-8876-20a311b1762c-config-volume\") pod \"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.317238 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99af3ed8-4da1-4029-8876-20a311b1762c-secret-volume\") pod \"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.317390 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pft7\" (UniqueName: \"kubernetes.io/projected/99af3ed8-4da1-4029-8876-20a311b1762c-kube-api-access-8pft7\") pod \"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.318221 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99af3ed8-4da1-4029-8876-20a311b1762c-config-volume\") pod 
\"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.321916 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99af3ed8-4da1-4029-8876-20a311b1762c-secret-volume\") pod \"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.332356 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pft7\" (UniqueName: \"kubernetes.io/projected/99af3ed8-4da1-4029-8876-20a311b1762c-kube-api-access-8pft7\") pod \"collect-profiles-29415225-k4b94\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.444195 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.455513 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.479632 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521051 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-config\") pod \"e90f4813-fb7c-4375-9e7d-94673381ffae\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521100 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-ovndb-tls-certs\") pod \"e90f4813-fb7c-4375-9e7d-94673381ffae\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521159 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/296c2b64-bd68-4e42-beb4-d49560b48ebe-logs\") pod \"296c2b64-bd68-4e42-beb4-d49560b48ebe\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521279 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqh5j\" (UniqueName: \"kubernetes.io/projected/e90f4813-fb7c-4375-9e7d-94673381ffae-kube-api-access-wqh5j\") pod \"e90f4813-fb7c-4375-9e7d-94673381ffae\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521343 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data-custom\") pod \"296c2b64-bd68-4e42-beb4-d49560b48ebe\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521403 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5bxg\" (UniqueName: 
\"kubernetes.io/projected/296c2b64-bd68-4e42-beb4-d49560b48ebe-kube-api-access-v5bxg\") pod \"296c2b64-bd68-4e42-beb4-d49560b48ebe\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521454 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data\") pod \"296c2b64-bd68-4e42-beb4-d49560b48ebe\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521603 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-httpd-config\") pod \"e90f4813-fb7c-4375-9e7d-94673381ffae\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521761 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-combined-ca-bundle\") pod \"e90f4813-fb7c-4375-9e7d-94673381ffae\" (UID: \"e90f4813-fb7c-4375-9e7d-94673381ffae\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.521814 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-combined-ca-bundle\") pod \"296c2b64-bd68-4e42-beb4-d49560b48ebe\" (UID: \"296c2b64-bd68-4e42-beb4-d49560b48ebe\") " Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.523491 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/296c2b64-bd68-4e42-beb4-d49560b48ebe-logs" (OuterVolumeSpecName: "logs") pod "296c2b64-bd68-4e42-beb4-d49560b48ebe" (UID: "296c2b64-bd68-4e42-beb4-d49560b48ebe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.528591 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "296c2b64-bd68-4e42-beb4-d49560b48ebe" (UID: "296c2b64-bd68-4e42-beb4-d49560b48ebe"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.532199 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e90f4813-fb7c-4375-9e7d-94673381ffae-kube-api-access-wqh5j" (OuterVolumeSpecName: "kube-api-access-wqh5j") pod "e90f4813-fb7c-4375-9e7d-94673381ffae" (UID: "e90f4813-fb7c-4375-9e7d-94673381ffae"). InnerVolumeSpecName "kube-api-access-wqh5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.532370 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/296c2b64-bd68-4e42-beb4-d49560b48ebe-kube-api-access-v5bxg" (OuterVolumeSpecName: "kube-api-access-v5bxg") pod "296c2b64-bd68-4e42-beb4-d49560b48ebe" (UID: "296c2b64-bd68-4e42-beb4-d49560b48ebe"). InnerVolumeSpecName "kube-api-access-v5bxg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.532684 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "e90f4813-fb7c-4375-9e7d-94673381ffae" (UID: "e90f4813-fb7c-4375-9e7d-94673381ffae"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.565192 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "296c2b64-bd68-4e42-beb4-d49560b48ebe" (UID: "296c2b64-bd68-4e42-beb4-d49560b48ebe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.573992 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"24e38bb8-1345-4058-a9cc-666c3b370ac8","Type":"ContainerStarted","Data":"d475ec0ac24fd6807dbbb1d6323091037f91ec0065c7082a2afafeb7f9fecff7"} Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.587071 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-xcf4m" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.587324 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xcf4m" event={"ID":"266e7065-7af6-4547-b7bb-5e981e095969","Type":"ContainerDied","Data":"c95a151926a48c092d999269319cfc2313153e8995c69d0bb7450f7fadb46abe"} Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.587377 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c95a151926a48c092d999269319cfc2313153e8995c69d0bb7450f7fadb46abe" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.603644 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f57444c8-9lkq9" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.603944 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f57444c8-9lkq9" event={"ID":"e90f4813-fb7c-4375-9e7d-94673381ffae","Type":"ContainerDied","Data":"88bfdda16b92ce1232617c3993fbdc015866fc86d87db96d8588e9b159185805"} Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.604012 4652 scope.go:117] "RemoveContainer" containerID="df2d7400d42d396963dfe9abe80aa7a96d39cd8cefed0945dfa7b52a3fba2906" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.604696 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data" (OuterVolumeSpecName: "config-data") pod "296c2b64-bd68-4e42-beb4-d49560b48ebe" (UID: "296c2b64-bd68-4e42-beb4-d49560b48ebe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.608313 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-765b8b7694-qt9ss" event={"ID":"296c2b64-bd68-4e42-beb4-d49560b48ebe","Type":"ContainerDied","Data":"b8e9101a563fa250c08a06d0d54bf3fa824a898b4cec1cdc8579438ef63a744d"} Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.608412 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-765b8b7694-qt9ss" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.630057 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.630098 4652 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.630110 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.630128 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/296c2b64-bd68-4e42-beb4-d49560b48ebe-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.630140 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqh5j\" (UniqueName: \"kubernetes.io/projected/e90f4813-fb7c-4375-9e7d-94673381ffae-kube-api-access-wqh5j\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.630150 4652 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/296c2b64-bd68-4e42-beb4-d49560b48ebe-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.630159 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5bxg\" (UniqueName: \"kubernetes.io/projected/296c2b64-bd68-4e42-beb4-d49560b48ebe-kube-api-access-v5bxg\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.636950 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-config" (OuterVolumeSpecName: "config") pod "e90f4813-fb7c-4375-9e7d-94673381ffae" (UID: "e90f4813-fb7c-4375-9e7d-94673381ffae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.644083 4652 scope.go:117] "RemoveContainer" containerID="33d9447d060975f3b4a6598751b46ebaf05b54df8ff59a1f14204e7f2c5ca06e" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.655140 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e90f4813-fb7c-4375-9e7d-94673381ffae" (UID: "e90f4813-fb7c-4375-9e7d-94673381ffae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.685801 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "e90f4813-fb7c-4375-9e7d-94673381ffae" (UID: "e90f4813-fb7c-4375-9e7d-94673381ffae"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.689091 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-765b8b7694-qt9ss"] Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.700210 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-765b8b7694-qt9ss"] Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.710795 4652 scope.go:117] "RemoveContainer" containerID="506138841ccf32d2034706204a7adbe83bd5aeec862d9a34cbcd6c5c18716129" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.732365 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.732393 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.735763 4652 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e90f4813-fb7c-4375-9e7d-94673381ffae-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.754369 4652 scope.go:117] "RemoveContainer" containerID="324dbafb6cdae2d5b3ccec1a860965aa7c4487fe87401e3ead14d3ce67da7dea" Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.940263 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5f57444c8-9lkq9"] Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.945960 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5f57444c8-9lkq9"] Dec 05 05:45:00 crc kubenswrapper[4652]: I1205 05:45:00.951366 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94"] Dec 05 05:45:00 crc kubenswrapper[4652]: W1205 05:45:00.956064 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99af3ed8_4da1_4029_8876_20a311b1762c.slice/crio-1ede94ddc51c6af7f34b04337335c872afc0899691a6f77213ba0a8cf7afa0ec WatchSource:0}: Error finding container 1ede94ddc51c6af7f34b04337335c872afc0899691a6f77213ba0a8cf7afa0ec: Status 404 returned error can't find the container with id 1ede94ddc51c6af7f34b04337335c872afc0899691a6f77213ba0a8cf7afa0ec Dec 05 05:45:01 crc kubenswrapper[4652]: E1205 05:45:01.092197 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode90f4813_fb7c_4375_9e7d_94673381ffae.slice/crio-88bfdda16b92ce1232617c3993fbdc015866fc86d87db96d8588e9b159185805\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode90f4813_fb7c_4375_9e7d_94673381ffae.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.309605 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 05:45:01 crc kubenswrapper[4652]: E1205 05:45:01.310313 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 
05:45:01.310326 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api" Dec 05 05:45:01 crc kubenswrapper[4652]: E1205 05:45:01.310347 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api-log" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.310352 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api-log" Dec 05 05:45:01 crc kubenswrapper[4652]: E1205 05:45:01.310363 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerName="neutron-api" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.310368 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerName="neutron-api" Dec 05 05:45:01 crc kubenswrapper[4652]: E1205 05:45:01.310381 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerName="neutron-httpd" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.310387 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerName="neutron-httpd" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.310565 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api-log" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.310576 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" containerName="barbican-api" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.310582 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerName="neutron-httpd" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.310603 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" containerName="neutron-api" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.311599 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.324104 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.324280 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-s89nn" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.324506 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.324760 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.345389 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.377691 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-wnqsq"] Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.377922 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" podUID="a00899df-e699-4c80-96e4-761d674a03a0" containerName="dnsmasq-dns" containerID="cri-o://71f2bca4040e75e236bb2a837785789ebeec3ba1bfe1298bf7266694994092b6" gracePeriod=10 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.378850 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.447677 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75958fc765-m6xxk"] Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.449466 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.460544 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.460635 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c2352b28-244f-49c3-92fe-02b1a5ce33b2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.460697 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.460717 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.460746 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-scripts\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.460790 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2wf7\" (UniqueName: \"kubernetes.io/projected/c2352b28-244f-49c3-92fe-02b1a5ce33b2-kube-api-access-w2wf7\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.470119 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-m6xxk"] Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.520146 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.522281 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.534976 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.535862 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564243 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-nb\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564309 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2wf7\" (UniqueName: \"kubernetes.io/projected/c2352b28-244f-49c3-92fe-02b1a5ce33b2-kube-api-access-w2wf7\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564367 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564388 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2lr7\" (UniqueName: \"kubernetes.io/projected/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-kube-api-access-b2lr7\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564473 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-swift-storage-0\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564491 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-svc\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564514 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c2352b28-244f-49c3-92fe-02b1a5ce33b2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564545 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-config\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc 
kubenswrapper[4652]: I1205 05:45:01.564695 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-sb\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564718 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564733 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.564780 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-scripts\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.565285 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c2352b28-244f-49c3-92fe-02b1a5ce33b2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.571488 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.574225 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-scripts\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.574871 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.580516 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.587387 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2wf7\" (UniqueName: \"kubernetes.io/projected/c2352b28-244f-49c3-92fe-02b1a5ce33b2-kube-api-access-w2wf7\") pod 
\"cinder-scheduler-0\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.629836 4652 generic.go:334] "Generic (PLEG): container finished" podID="99af3ed8-4da1-4029-8876-20a311b1762c" containerID="74d8266a7df36f6402a9882502e17b2a2ee6ce84da1486bd90de96e5b94c14e3" exitCode=0 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.629903 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" event={"ID":"99af3ed8-4da1-4029-8876-20a311b1762c","Type":"ContainerDied","Data":"74d8266a7df36f6402a9882502e17b2a2ee6ce84da1486bd90de96e5b94c14e3"} Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.629931 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" event={"ID":"99af3ed8-4da1-4029-8876-20a311b1762c","Type":"ContainerStarted","Data":"1ede94ddc51c6af7f34b04337335c872afc0899691a6f77213ba0a8cf7afa0ec"} Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.648153 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.662726 4652 generic.go:334] "Generic (PLEG): container finished" podID="a00899df-e699-4c80-96e4-761d674a03a0" containerID="71f2bca4040e75e236bb2a837785789ebeec3ba1bfe1298bf7266694994092b6" exitCode=0 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.662801 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" event={"ID":"a00899df-e699-4c80-96e4-761d674a03a0","Type":"ContainerDied","Data":"71f2bca4040e75e236bb2a837785789ebeec3ba1bfe1298bf7266694994092b6"} Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666459 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f360888b-244b-4d42-80f8-6cf7e04335ab-logs\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666508 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666566 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-swift-storage-0\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666584 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-svc\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666607 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-config\") pod 
\"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666623 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqsz9\" (UniqueName: \"kubernetes.io/projected/f360888b-244b-4d42-80f8-6cf7e04335ab-kube-api-access-jqsz9\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666681 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-sb\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666712 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data-custom\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666728 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f360888b-244b-4d42-80f8-6cf7e04335ab-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666763 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-nb\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666794 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666810 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-scripts\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.666830 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2lr7\" (UniqueName: \"kubernetes.io/projected/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-kube-api-access-b2lr7\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.667810 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-swift-storage-0\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " 
pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.668286 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-svc\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.668791 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-config\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.669258 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-sb\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.669784 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-nb\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.680310 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerStarted","Data":"d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2"} Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.680482 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="ceilometer-central-agent" containerID="cri-o://9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317" gracePeriod=30 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.680492 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2lr7\" (UniqueName: \"kubernetes.io/projected/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-kube-api-access-b2lr7\") pod \"dnsmasq-dns-75958fc765-m6xxk\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") " pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.680577 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.680593 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="proxy-httpd" containerID="cri-o://d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2" gracePeriod=30 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.680633 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="sg-core" containerID="cri-o://dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf" gracePeriod=30 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.680666 4652 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openstack/ceilometer-0" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="ceilometer-notification-agent" containerID="cri-o://6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58" gracePeriod=30 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.705198 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3e5d534b-49ad-4464-a68a-6ea3de404e3b","Type":"ContainerStarted","Data":"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a"} Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.705443 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerName="glance-log" containerID="cri-o://b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837" gracePeriod=30 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.705742 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerName="glance-httpd" containerID="cri-o://d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a" gracePeriod=30 Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.710213 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.462351853 podStartE2EDuration="1m1.710199944s" podCreationTimestamp="2025-12-05 05:44:00 +0000 UTC" firstStartedPulling="2025-12-05 05:44:01.237021244 +0000 UTC m=+1043.473751511" lastFinishedPulling="2025-12-05 05:45:00.484869334 +0000 UTC m=+1102.721599602" observedRunningTime="2025-12-05 05:45:01.698767318 +0000 UTC m=+1103.935497585" watchObservedRunningTime="2025-12-05 05:45:01.710199944 +0000 UTC m=+1103.946930211" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.746869 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.746844058 podStartE2EDuration="8.746844058s" podCreationTimestamp="2025-12-05 05:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:01.744596285 +0000 UTC m=+1103.981326552" watchObservedRunningTime="2025-12-05 05:45:01.746844058 +0000 UTC m=+1103.983574325" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.755262 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"24e38bb8-1345-4058-a9cc-666c3b370ac8","Type":"ContainerStarted","Data":"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a"} Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.769935 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data-custom\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.770009 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f360888b-244b-4d42-80f8-6cf7e04335ab-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 
05:45:01.770608 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.770639 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-scripts\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.770724 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f360888b-244b-4d42-80f8-6cf7e04335ab-logs\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.770762 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.770809 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqsz9\" (UniqueName: \"kubernetes.io/projected/f360888b-244b-4d42-80f8-6cf7e04335ab-kube-api-access-jqsz9\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.773169 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f360888b-244b-4d42-80f8-6cf7e04335ab-logs\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.774304 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data-custom\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.774374 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f360888b-244b-4d42-80f8-6cf7e04335ab-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.775273 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-scripts\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.778240 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.778743 4652 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.791975 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqsz9\" (UniqueName: \"kubernetes.io/projected/f360888b-244b-4d42-80f8-6cf7e04335ab-kube-api-access-jqsz9\") pod \"cinder-api-0\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " pod="openstack/cinder-api-0" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.843841 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:01 crc kubenswrapper[4652]: I1205 05:45:01.863387 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.153725 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="296c2b64-bd68-4e42-beb4-d49560b48ebe" path="/var/lib/kubelet/pods/296c2b64-bd68-4e42-beb4-d49560b48ebe/volumes" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.154848 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e90f4813-fb7c-4375-9e7d-94673381ffae" path="/var/lib/kubelet/pods/e90f4813-fb7c-4375-9e7d-94673381ffae/volumes" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.156072 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.262914 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.294489 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-config\") pod \"a00899df-e699-4c80-96e4-761d674a03a0\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.294580 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-sb\") pod \"a00899df-e699-4c80-96e4-761d674a03a0\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.294665 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-svc\") pod \"a00899df-e699-4c80-96e4-761d674a03a0\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.294689 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-swift-storage-0\") pod \"a00899df-e699-4c80-96e4-761d674a03a0\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.294733 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-nb\") pod \"a00899df-e699-4c80-96e4-761d674a03a0\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " Dec 05 
05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.294804 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8w57c\" (UniqueName: \"kubernetes.io/projected/a00899df-e699-4c80-96e4-761d674a03a0-kube-api-access-8w57c\") pod \"a00899df-e699-4c80-96e4-761d674a03a0\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.321407 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a00899df-e699-4c80-96e4-761d674a03a0-kube-api-access-8w57c" (OuterVolumeSpecName: "kube-api-access-8w57c") pod "a00899df-e699-4c80-96e4-761d674a03a0" (UID: "a00899df-e699-4c80-96e4-761d674a03a0"). InnerVolumeSpecName "kube-api-access-8w57c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.329158 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.357813 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-config" (OuterVolumeSpecName: "config") pod "a00899df-e699-4c80-96e4-761d674a03a0" (UID: "a00899df-e699-4c80-96e4-761d674a03a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.387118 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a00899df-e699-4c80-96e4-761d674a03a0" (UID: "a00899df-e699-4c80-96e4-761d674a03a0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.404353 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a00899df-e699-4c80-96e4-761d674a03a0" (UID: "a00899df-e699-4c80-96e4-761d674a03a0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.404434 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-svc\") pod \"a00899df-e699-4c80-96e4-761d674a03a0\" (UID: \"a00899df-e699-4c80-96e4-761d674a03a0\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.405201 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8w57c\" (UniqueName: \"kubernetes.io/projected/a00899df-e699-4c80-96e4-761d674a03a0-kube-api-access-8w57c\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.405227 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.406314 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: W1205 05:45:02.407385 4652 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/a00899df-e699-4c80-96e4-761d674a03a0/volumes/kubernetes.io~configmap/dns-svc Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.407426 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a00899df-e699-4c80-96e4-761d674a03a0" (UID: "a00899df-e699-4c80-96e4-761d674a03a0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.419935 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a00899df-e699-4c80-96e4-761d674a03a0" (UID: "a00899df-e699-4c80-96e4-761d674a03a0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.434108 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a00899df-e699-4c80-96e4-761d674a03a0" (UID: "a00899df-e699-4c80-96e4-761d674a03a0"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.509490 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.509536 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.509550 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a00899df-e699-4c80-96e4-761d674a03a0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.592137 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-m6xxk"] Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.604846 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.614864 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.716318 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.716358 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-config-data\") pod \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.716461 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-logs\") pod \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.716505 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8prvn\" (UniqueName: \"kubernetes.io/projected/3e5d534b-49ad-4464-a68a-6ea3de404e3b-kube-api-access-8prvn\") pod \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.716580 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-scripts\") pod \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.716639 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-combined-ca-bundle\") pod \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.716675 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-httpd-run\") pod \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\" (UID: \"3e5d534b-49ad-4464-a68a-6ea3de404e3b\") " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.717458 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "3e5d534b-49ad-4464-a68a-6ea3de404e3b" (UID: "3e5d534b-49ad-4464-a68a-6ea3de404e3b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.717701 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-logs" (OuterVolumeSpecName: "logs") pod "3e5d534b-49ad-4464-a68a-6ea3de404e3b" (UID: "3e5d534b-49ad-4464-a68a-6ea3de404e3b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.723071 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "3e5d534b-49ad-4464-a68a-6ea3de404e3b" (UID: "3e5d534b-49ad-4464-a68a-6ea3de404e3b"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.723198 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-scripts" (OuterVolumeSpecName: "scripts") pod "3e5d534b-49ad-4464-a68a-6ea3de404e3b" (UID: "3e5d534b-49ad-4464-a68a-6ea3de404e3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.740835 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e5d534b-49ad-4464-a68a-6ea3de404e3b-kube-api-access-8prvn" (OuterVolumeSpecName: "kube-api-access-8prvn") pod "3e5d534b-49ad-4464-a68a-6ea3de404e3b" (UID: "3e5d534b-49ad-4464-a68a-6ea3de404e3b"). InnerVolumeSpecName "kube-api-access-8prvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.800499 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e5d534b-49ad-4464-a68a-6ea3de404e3b" (UID: "3e5d534b-49ad-4464-a68a-6ea3de404e3b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.813627 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c2352b28-244f-49c3-92fe-02b1a5ce33b2","Type":"ContainerStarted","Data":"cc38f94566964b290b0eda608289b5852eef6162e4fa339b9dd16e96e9d9edd2"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.819332 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.819432 4652 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.819515 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.819604 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5d534b-49ad-4464-a68a-6ea3de404e3b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.819668 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8prvn\" (UniqueName: \"kubernetes.io/projected/3e5d534b-49ad-4464-a68a-6ea3de404e3b-kube-api-access-8prvn\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.819738 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.827353 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" event={"ID":"a00899df-e699-4c80-96e4-761d674a03a0","Type":"ContainerDied","Data":"80ab81fb2a8be2fafbf8710547cc7239a162fad5805e810fa2b535bfc974a2b2"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.827424 4652 scope.go:117] "RemoveContainer" containerID="71f2bca4040e75e236bb2a837785789ebeec3ba1bfe1298bf7266694994092b6" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.827633 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c68846bf-wnqsq" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.836110 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f360888b-244b-4d42-80f8-6cf7e04335ab","Type":"ContainerStarted","Data":"1bfacee5786846b72e102ef0d44f0ba98a716565ebc161c380dbcda8f0e8ee34"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.855118 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-config-data" (OuterVolumeSpecName: "config-data") pod "3e5d534b-49ad-4464-a68a-6ea3de404e3b" (UID: "3e5d534b-49ad-4464-a68a-6ea3de404e3b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.857035 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" event={"ID":"9f6dcbe4-f415-43ec-a986-746cdcdeba2b","Type":"ContainerStarted","Data":"054bec4e17e545a15cb5ca25d0ff0e9bfb21477ed2cc146dbbdc731a17182108"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.865038 4652 generic.go:334] "Generic (PLEG): container finished" podID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerID="d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2" exitCode=0 Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.865307 4652 generic.go:334] "Generic (PLEG): container finished" podID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerID="dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf" exitCode=2 Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.865317 4652 generic.go:334] "Generic (PLEG): container finished" podID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerID="9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317" exitCode=0 Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.865356 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerDied","Data":"d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.865379 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerDied","Data":"dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.865390 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerDied","Data":"9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.868047 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.868064 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3e5d534b-49ad-4464-a68a-6ea3de404e3b","Type":"ContainerDied","Data":"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.868272 4652 generic.go:334] "Generic (PLEG): container finished" podID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerID="d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a" exitCode=0 Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.868304 4652 generic.go:334] "Generic (PLEG): container finished" podID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerID="b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837" exitCode=143 Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.868357 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3e5d534b-49ad-4464-a68a-6ea3de404e3b","Type":"ContainerDied","Data":"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.868373 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3e5d534b-49ad-4464-a68a-6ea3de404e3b","Type":"ContainerDied","Data":"7ac2107cb334455f52066989027692e06e87c8eea36f9b3772bb847ed5a6c859"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.869841 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerName="glance-log" containerID="cri-o://40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a" gracePeriod=30 Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.870018 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"24e38bb8-1345-4058-a9cc-666c3b370ac8","Type":"ContainerStarted","Data":"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da"} Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.870066 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerName="glance-httpd" containerID="cri-o://ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da" gracePeriod=30 Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.870263 4652 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.925285 4652 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.925311 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5d534b-49ad-4464-a68a-6ea3de404e3b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.954756 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.972880 4652 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.972848257999999 podStartE2EDuration="9.972848258s" podCreationTimestamp="2025-12-05 05:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:02.901957816 +0000 UTC m=+1105.138688082" watchObservedRunningTime="2025-12-05 05:45:02.972848258 +0000 UTC m=+1105.209578526" Dec 05 05:45:02 crc kubenswrapper[4652]: I1205 05:45:02.985783 4652 scope.go:117] "RemoveContainer" containerID="b4d7ed97a15f09164620b5169e49db301c7ab66f735aeb8eb9f11537a352a628" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.042949 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.066983 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.082817 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-wnqsq"] Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.098344 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84c68846bf-wnqsq"] Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.098983 4652 scope.go:117] "RemoveContainer" containerID="d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.109171 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:03 crc kubenswrapper[4652]: E1205 05:45:03.109775 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a00899df-e699-4c80-96e4-761d674a03a0" containerName="init" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.109791 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a00899df-e699-4c80-96e4-761d674a03a0" containerName="init" Dec 05 05:45:03 crc kubenswrapper[4652]: E1205 05:45:03.109806 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a00899df-e699-4c80-96e4-761d674a03a0" containerName="dnsmasq-dns" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.109813 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a00899df-e699-4c80-96e4-761d674a03a0" containerName="dnsmasq-dns" Dec 05 05:45:03 crc kubenswrapper[4652]: E1205 05:45:03.109835 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerName="glance-log" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.109842 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerName="glance-log" Dec 05 05:45:03 crc kubenswrapper[4652]: E1205 05:45:03.109859 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerName="glance-httpd" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.109865 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerName="glance-httpd" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.110070 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="a00899df-e699-4c80-96e4-761d674a03a0" containerName="dnsmasq-dns" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.110088 4652 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerName="glance-log" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.110103 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" containerName="glance-httpd" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.111362 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.114739 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.115031 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.115299 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.171145 4652 scope.go:117] "RemoveContainer" containerID="b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.236224 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t79bc\" (UniqueName: \"kubernetes.io/projected/9f42d1f3-d588-4065-8c30-41c5309f45cb-kube-api-access-t79bc\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.236459 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-config-data\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.236505 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.236535 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.236609 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.236640 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-logs\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " 
pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.236666 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.236691 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-scripts\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.339663 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-logs\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.339709 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.339751 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-scripts\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.339804 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t79bc\" (UniqueName: \"kubernetes.io/projected/9f42d1f3-d588-4065-8c30-41c5309f45cb-kube-api-access-t79bc\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.339828 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-config-data\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.339867 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.339883 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 
05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.339943 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.340395 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.340637 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-logs\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.343011 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.361276 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.363404 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-config-data\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.364659 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-scripts\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.367440 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t79bc\" (UniqueName: \"kubernetes.io/projected/9f42d1f3-d588-4065-8c30-41c5309f45cb-kube-api-access-t79bc\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.367784 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.399150 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/cinder-api-0"] Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.447289 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.462640 4652 scope.go:117] "RemoveContainer" containerID="d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a" Dec 05 05:45:03 crc kubenswrapper[4652]: E1205 05:45:03.464130 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a\": container with ID starting with d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a not found: ID does not exist" containerID="d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.464186 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a"} err="failed to get container status \"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a\": rpc error: code = NotFound desc = could not find container \"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a\": container with ID starting with d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a not found: ID does not exist" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.464224 4652 scope.go:117] "RemoveContainer" containerID="b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837" Dec 05 05:45:03 crc kubenswrapper[4652]: E1205 05:45:03.464597 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837\": container with ID starting with b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837 not found: ID does not exist" containerID="b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.464630 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837"} err="failed to get container status \"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837\": rpc error: code = NotFound desc = could not find container \"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837\": container with ID starting with b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837 not found: ID does not exist" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.464647 4652 scope.go:117] "RemoveContainer" containerID="d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.473409 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.473775 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a"} err="failed to get container status \"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a\": rpc error: code = NotFound desc = could not find container \"d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a\": container with ID starting with d2366de14ed1823841bfcdc3a38f0ab872cabd075d16347825de74e584406f2a not found: ID does not exist" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.473830 4652 scope.go:117] "RemoveContainer" containerID="b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.474285 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837"} err="failed to get container status \"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837\": rpc error: code = NotFound desc = could not find container \"b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837\": container with ID starting with b48b77d2eb6562deee7a90d5f1ae89d87c79c83ccaebcfae6675165e4df81837 not found: ID does not exist" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.505649 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.635999 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.655943 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pft7\" (UniqueName: \"kubernetes.io/projected/99af3ed8-4da1-4029-8876-20a311b1762c-kube-api-access-8pft7\") pod \"99af3ed8-4da1-4029-8876-20a311b1762c\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.656019 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99af3ed8-4da1-4029-8876-20a311b1762c-config-volume\") pod \"99af3ed8-4da1-4029-8876-20a311b1762c\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.656088 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99af3ed8-4da1-4029-8876-20a311b1762c-secret-volume\") pod \"99af3ed8-4da1-4029-8876-20a311b1762c\" (UID: \"99af3ed8-4da1-4029-8876-20a311b1762c\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.659359 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99af3ed8-4da1-4029-8876-20a311b1762c-config-volume" (OuterVolumeSpecName: "config-volume") pod "99af3ed8-4da1-4029-8876-20a311b1762c" (UID: "99af3ed8-4da1-4029-8876-20a311b1762c"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.669081 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99af3ed8-4da1-4029-8876-20a311b1762c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "99af3ed8-4da1-4029-8876-20a311b1762c" (UID: "99af3ed8-4da1-4029-8876-20a311b1762c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.673696 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99af3ed8-4da1-4029-8876-20a311b1762c-kube-api-access-8pft7" (OuterVolumeSpecName: "kube-api-access-8pft7") pod "99af3ed8-4da1-4029-8876-20a311b1762c" (UID: "99af3ed8-4da1-4029-8876-20a311b1762c"). InnerVolumeSpecName "kube-api-access-8pft7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.758009 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-config-data\") pod \"24e38bb8-1345-4058-a9cc-666c3b370ac8\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.758133 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-combined-ca-bundle\") pod \"24e38bb8-1345-4058-a9cc-666c3b370ac8\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.758260 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-scripts\") pod \"24e38bb8-1345-4058-a9cc-666c3b370ac8\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.758426 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"24e38bb8-1345-4058-a9cc-666c3b370ac8\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.758845 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-httpd-run\") pod \"24e38bb8-1345-4058-a9cc-666c3b370ac8\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.758997 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2bx5\" (UniqueName: \"kubernetes.io/projected/24e38bb8-1345-4058-a9cc-666c3b370ac8-kube-api-access-n2bx5\") pod \"24e38bb8-1345-4058-a9cc-666c3b370ac8\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.759150 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-logs\") pod \"24e38bb8-1345-4058-a9cc-666c3b370ac8\" (UID: \"24e38bb8-1345-4058-a9cc-666c3b370ac8\") " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.759894 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "24e38bb8-1345-4058-a9cc-666c3b370ac8" (UID: "24e38bb8-1345-4058-a9cc-666c3b370ac8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.760311 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-logs" (OuterVolumeSpecName: "logs") pod "24e38bb8-1345-4058-a9cc-666c3b370ac8" (UID: "24e38bb8-1345-4058-a9cc-666c3b370ac8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.761096 4652 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.761141 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pft7\" (UniqueName: \"kubernetes.io/projected/99af3ed8-4da1-4029-8876-20a311b1762c-kube-api-access-8pft7\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.761155 4652 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99af3ed8-4da1-4029-8876-20a311b1762c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.761164 4652 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99af3ed8-4da1-4029-8876-20a311b1762c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.761173 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e38bb8-1345-4058-a9cc-666c3b370ac8-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.765773 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24e38bb8-1345-4058-a9cc-666c3b370ac8-kube-api-access-n2bx5" (OuterVolumeSpecName: "kube-api-access-n2bx5") pod "24e38bb8-1345-4058-a9cc-666c3b370ac8" (UID: "24e38bb8-1345-4058-a9cc-666c3b370ac8"). InnerVolumeSpecName "kube-api-access-n2bx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.766464 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-scripts" (OuterVolumeSpecName: "scripts") pod "24e38bb8-1345-4058-a9cc-666c3b370ac8" (UID: "24e38bb8-1345-4058-a9cc-666c3b370ac8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.769032 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "24e38bb8-1345-4058-a9cc-666c3b370ac8" (UID: "24e38bb8-1345-4058-a9cc-666c3b370ac8"). InnerVolumeSpecName "local-storage04-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.793221 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "24e38bb8-1345-4058-a9cc-666c3b370ac8" (UID: "24e38bb8-1345-4058-a9cc-666c3b370ac8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.815764 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-config-data" (OuterVolumeSpecName: "config-data") pod "24e38bb8-1345-4058-a9cc-666c3b370ac8" (UID: "24e38bb8-1345-4058-a9cc-666c3b370ac8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.863333 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.863630 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.863643 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24e38bb8-1345-4058-a9cc-666c3b370ac8-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.863671 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.863682 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2bx5\" (UniqueName: \"kubernetes.io/projected/24e38bb8-1345-4058-a9cc-666c3b370ac8-kube-api-access-n2bx5\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.888894 4652 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.902250 4652 generic.go:334] "Generic (PLEG): container finished" podID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" containerID="8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3" exitCode=0 Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.902301 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" event={"ID":"9f6dcbe4-f415-43ec-a986-746cdcdeba2b","Type":"ContainerDied","Data":"8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3"} Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.926399 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"24e38bb8-1345-4058-a9cc-666c3b370ac8","Type":"ContainerDied","Data":"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da"} Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.926430 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.926215 4652 generic.go:334] "Generic (PLEG): container finished" podID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerID="ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da" exitCode=0 Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.927177 4652 generic.go:334] "Generic (PLEG): container finished" podID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerID="40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a" exitCode=143 Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.927439 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"24e38bb8-1345-4058-a9cc-666c3b370ac8","Type":"ContainerDied","Data":"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a"} Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.927483 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"24e38bb8-1345-4058-a9cc-666c3b370ac8","Type":"ContainerDied","Data":"d475ec0ac24fd6807dbbb1d6323091037f91ec0065c7082a2afafeb7f9fecff7"} Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.926499 4652 scope.go:117] "RemoveContainer" containerID="ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.941017 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" event={"ID":"99af3ed8-4da1-4029-8876-20a311b1762c","Type":"ContainerDied","Data":"1ede94ddc51c6af7f34b04337335c872afc0899691a6f77213ba0a8cf7afa0ec"} Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.941048 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ede94ddc51c6af7f34b04337335c872afc0899691a6f77213ba0a8cf7afa0ec" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.941104 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94" Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.954345 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f360888b-244b-4d42-80f8-6cf7e04335ab","Type":"ContainerStarted","Data":"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61"} Dec 05 05:45:03 crc kubenswrapper[4652]: I1205 05:45:03.965454 4652 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.065785 4652 scope.go:117] "RemoveContainer" containerID="40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.065907 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.082264 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.093794 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:45:04 crc kubenswrapper[4652]: E1205 05:45:04.094208 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99af3ed8-4da1-4029-8876-20a311b1762c" containerName="collect-profiles" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.094221 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="99af3ed8-4da1-4029-8876-20a311b1762c" containerName="collect-profiles" Dec 05 05:45:04 crc kubenswrapper[4652]: E1205 05:45:04.094233 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerName="glance-httpd" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.094240 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerName="glance-httpd" Dec 05 05:45:04 crc kubenswrapper[4652]: E1205 05:45:04.094274 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerName="glance-log" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.094280 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerName="glance-log" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.094440 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="99af3ed8-4da1-4029-8876-20a311b1762c" containerName="collect-profiles" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.094454 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerName="glance-httpd" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.094462 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" containerName="glance-log" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.095687 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.098598 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.100950 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.104815 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.150719 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24e38bb8-1345-4058-a9cc-666c3b370ac8" path="/var/lib/kubelet/pods/24e38bb8-1345-4058-a9cc-666c3b370ac8/volumes" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.151184 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.151224 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.151656 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e5d534b-49ad-4464-a68a-6ea3de404e3b" path="/var/lib/kubelet/pods/3e5d534b-49ad-4464-a68a-6ea3de404e3b/volumes" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.152351 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a00899df-e699-4c80-96e4-761d674a03a0" path="/var/lib/kubelet/pods/a00899df-e699-4c80-96e4-761d674a03a0/volumes" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.153493 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.153538 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.154429 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0e8d98548aa27c73c99040f551d5e2c229be9c9f00418747bea9244c8abd5fdd"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.154475 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://0e8d98548aa27c73c99040f551d5e2c229be9c9f00418747bea9244c8abd5fdd" gracePeriod=600 Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.157151 4652 scope.go:117] "RemoveContainer" containerID="ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da" Dec 05 05:45:04 crc kubenswrapper[4652]: E1205 05:45:04.157866 4652 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da\": container with ID starting with ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da not found: ID does not exist" containerID="ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.157891 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da"} err="failed to get container status \"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da\": rpc error: code = NotFound desc = could not find container \"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da\": container with ID starting with ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da not found: ID does not exist" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.157906 4652 scope.go:117] "RemoveContainer" containerID="40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a" Dec 05 05:45:04 crc kubenswrapper[4652]: E1205 05:45:04.159472 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a\": container with ID starting with 40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a not found: ID does not exist" containerID="40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.159509 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a"} err="failed to get container status \"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a\": rpc error: code = NotFound desc = could not find container \"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a\": container with ID starting with 40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a not found: ID does not exist" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.159549 4652 scope.go:117] "RemoveContainer" containerID="ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.168843 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da"} err="failed to get container status \"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da\": rpc error: code = NotFound desc = could not find container \"ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da\": container with ID starting with ffd5c74f067a911d1db87ee6bdb5506374ed7fd78d8edca19d82f76180aab0da not found: ID does not exist" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.168865 4652 scope.go:117] "RemoveContainer" containerID="40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.171075 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a"} err="failed to get container status \"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a\": rpc error: code = NotFound desc = could not find 
container \"40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a\": container with ID starting with 40ee75bf1529497dee89a35c1c42f1d59752d34e41d1ee6cd0398666e8b2765a not found: ID does not exist" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.172108 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.172166 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.172225 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-logs\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.172289 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.172317 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl4rv\" (UniqueName: \"kubernetes.io/projected/4dccdfc3-432a-459c-a940-1d167d41405b-kube-api-access-cl4rv\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.172350 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.172426 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.172473 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275058 4652 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-logs\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275370 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275402 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl4rv\" (UniqueName: \"kubernetes.io/projected/4dccdfc3-432a-459c-a940-1d167d41405b-kube-api-access-cl4rv\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275458 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275509 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275597 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275624 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275653 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.275665 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-logs\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.276395 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.276590 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.280842 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.281348 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.284709 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.286878 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.290713 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl4rv\" (UniqueName: \"kubernetes.io/projected/4dccdfc3-432a-459c-a940-1d167d41405b-kube-api-access-cl4rv\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.331034 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") " pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.369765 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.447904 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.715353 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.715714 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.716984 4652 scope.go:117] "RemoveContainer" containerID="45845c4c4d40fa1001367e8f6d74e8f4fc8cddaa1c3feae8f78472a5ef8c3442" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.982472 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="0e8d98548aa27c73c99040f551d5e2c229be9c9f00418747bea9244c8abd5fdd" exitCode=0 Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.982760 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"0e8d98548aa27c73c99040f551d5e2c229be9c9f00418747bea9244c8abd5fdd"} Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.982789 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"3f2f5ede1a2a06f286baf6cd6bd2d3f9a5125ae0f0e1be6280b0ee6eb87a5ea7"} Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.982808 4652 scope.go:117] "RemoveContainer" containerID="30c3258c2639df7b2105ca40ba8c89dc3f283f9909c72ed9341ff06120095f7d" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.996703 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f360888b-244b-4d42-80f8-6cf7e04335ab","Type":"ContainerStarted","Data":"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc"} Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.997039 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerName="cinder-api-log" containerID="cri-o://7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61" gracePeriod=30 Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.997273 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 05:45:04 crc kubenswrapper[4652]: I1205 05:45:04.997305 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerName="cinder-api" containerID="cri-o://b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc" gracePeriod=30 Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.015241 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" event={"ID":"9f6dcbe4-f415-43ec-a986-746cdcdeba2b","Type":"ContainerStarted","Data":"10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4"} Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.015413 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.020791 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:45:05 crc 
kubenswrapper[4652]: I1205 05:45:05.046452 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f42d1f3-d588-4065-8c30-41c5309f45cb","Type":"ContainerStarted","Data":"a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33"} Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.046512 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f42d1f3-d588-4065-8c30-41c5309f45cb","Type":"ContainerStarted","Data":"923c2729f6783933dc99cadd16d54e26c05c323ce357f5478ce7798fd8b6852e"} Dec 05 05:45:05 crc kubenswrapper[4652]: W1205 05:45:05.064475 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4dccdfc3_432a_459c_a940_1d167d41405b.slice/crio-98a57cdf0db2b8e3448ea1972ec204cccd4ee9e37eb961bd91c82add6bc00147 WatchSource:0}: Error finding container 98a57cdf0db2b8e3448ea1972ec204cccd4ee9e37eb961bd91c82add6bc00147: Status 404 returned error can't find the container with id 98a57cdf0db2b8e3448ea1972ec204cccd4ee9e37eb961bd91c82add6bc00147 Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.068113 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c2352b28-244f-49c3-92fe-02b1a5ce33b2","Type":"ContainerStarted","Data":"34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f"} Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.088428 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.088409181 podStartE2EDuration="4.088409181s" podCreationTimestamp="2025-12-05 05:45:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:05.02484182 +0000 UTC m=+1107.261572086" watchObservedRunningTime="2025-12-05 05:45:05.088409181 +0000 UTC m=+1107.325139449" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.099511 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" podStartSLOduration=4.09948692 podStartE2EDuration="4.09948692s" podCreationTimestamp="2025-12-05 05:45:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:05.043862575 +0000 UTC m=+1107.280592843" watchObservedRunningTime="2025-12-05 05:45:05.09948692 +0000 UTC m=+1107.336217187" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.200011 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-c77c7b944-twjsn" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.249236 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7744f5c9d6-t75mf"] Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.249457 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7744f5c9d6-t75mf" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon-log" containerID="cri-o://1a97cb99478e8e1f70f698c3e507c2e8e7e789636755f4bc50bd106c55435730" gracePeriod=30 Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.249625 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7744f5c9d6-t75mf" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon" 
containerID="cri-o://7c9d3fec70cacda0b535157a56f5d7fa1a3c1cedf136c200c7a49e146e79f017" gracePeriod=30 Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.595758 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.719901 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f360888b-244b-4d42-80f8-6cf7e04335ab-etc-machine-id\") pod \"f360888b-244b-4d42-80f8-6cf7e04335ab\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.719975 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f360888b-244b-4d42-80f8-6cf7e04335ab-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f360888b-244b-4d42-80f8-6cf7e04335ab" (UID: "f360888b-244b-4d42-80f8-6cf7e04335ab"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.720079 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data-custom\") pod \"f360888b-244b-4d42-80f8-6cf7e04335ab\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.720105 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f360888b-244b-4d42-80f8-6cf7e04335ab-logs\") pod \"f360888b-244b-4d42-80f8-6cf7e04335ab\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.720535 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f360888b-244b-4d42-80f8-6cf7e04335ab-logs" (OuterVolumeSpecName: "logs") pod "f360888b-244b-4d42-80f8-6cf7e04335ab" (UID: "f360888b-244b-4d42-80f8-6cf7e04335ab"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.720632 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data\") pod \"f360888b-244b-4d42-80f8-6cf7e04335ab\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.720666 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-combined-ca-bundle\") pod \"f360888b-244b-4d42-80f8-6cf7e04335ab\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.720784 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-scripts\") pod \"f360888b-244b-4d42-80f8-6cf7e04335ab\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.720812 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqsz9\" (UniqueName: \"kubernetes.io/projected/f360888b-244b-4d42-80f8-6cf7e04335ab-kube-api-access-jqsz9\") pod \"f360888b-244b-4d42-80f8-6cf7e04335ab\" (UID: \"f360888b-244b-4d42-80f8-6cf7e04335ab\") " Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.721376 4652 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f360888b-244b-4d42-80f8-6cf7e04335ab-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.721393 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f360888b-244b-4d42-80f8-6cf7e04335ab-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.723356 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f360888b-244b-4d42-80f8-6cf7e04335ab" (UID: "f360888b-244b-4d42-80f8-6cf7e04335ab"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.726006 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f360888b-244b-4d42-80f8-6cf7e04335ab-kube-api-access-jqsz9" (OuterVolumeSpecName: "kube-api-access-jqsz9") pod "f360888b-244b-4d42-80f8-6cf7e04335ab" (UID: "f360888b-244b-4d42-80f8-6cf7e04335ab"). InnerVolumeSpecName "kube-api-access-jqsz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.729238 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-scripts" (OuterVolumeSpecName: "scripts") pod "f360888b-244b-4d42-80f8-6cf7e04335ab" (UID: "f360888b-244b-4d42-80f8-6cf7e04335ab"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.757849 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f360888b-244b-4d42-80f8-6cf7e04335ab" (UID: "f360888b-244b-4d42-80f8-6cf7e04335ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.779663 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data" (OuterVolumeSpecName: "config-data") pod "f360888b-244b-4d42-80f8-6cf7e04335ab" (UID: "f360888b-244b-4d42-80f8-6cf7e04335ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.822358 4652 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.822386 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.822397 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.822407 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f360888b-244b-4d42-80f8-6cf7e04335ab-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:05 crc kubenswrapper[4652]: I1205 05:45:05.822415 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqsz9\" (UniqueName: \"kubernetes.io/projected/f360888b-244b-4d42-80f8-6cf7e04335ab-kube-api-access-jqsz9\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.079132 4652 generic.go:334] "Generic (PLEG): container finished" podID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerID="b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc" exitCode=0 Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.079172 4652 generic.go:334] "Generic (PLEG): container finished" podID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerID="7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61" exitCode=143 Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.079210 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.079244 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f360888b-244b-4d42-80f8-6cf7e04335ab","Type":"ContainerDied","Data":"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc"} Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.079285 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f360888b-244b-4d42-80f8-6cf7e04335ab","Type":"ContainerDied","Data":"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61"} Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.079296 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f360888b-244b-4d42-80f8-6cf7e04335ab","Type":"ContainerDied","Data":"1bfacee5786846b72e102ef0d44f0ba98a716565ebc161c380dbcda8f0e8ee34"} Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.079316 4652 scope.go:117] "RemoveContainer" containerID="b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.089264 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4dccdfc3-432a-459c-a940-1d167d41405b","Type":"ContainerStarted","Data":"8792eb6cec75227540052c82010fe2cb97fbbdd431c72ef97e27a65613474b8b"} Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.089303 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4dccdfc3-432a-459c-a940-1d167d41405b","Type":"ContainerStarted","Data":"98a57cdf0db2b8e3448ea1972ec204cccd4ee9e37eb961bd91c82add6bc00147"} Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.094898 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerStarted","Data":"7d19c23409283640cb1bd3a0d34c8d831faf4762b4108458c1b8b95e16face3c"} Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.106981 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f42d1f3-d588-4065-8c30-41c5309f45cb","Type":"ContainerStarted","Data":"67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee"} Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.109948 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.125170 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.150898 4652 scope.go:117] "RemoveContainer" containerID="7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.159014 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.158997582 podStartE2EDuration="3.158997582s" podCreationTimestamp="2025-12-05 05:45:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:06.157541506 +0000 UTC m=+1108.394271774" watchObservedRunningTime="2025-12-05 05:45:06.158997582 +0000 UTC m=+1108.395727849" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.169441 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" path="/var/lib/kubelet/pods/f360888b-244b-4d42-80f8-6cf7e04335ab/volumes" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.170349 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c2352b28-244f-49c3-92fe-02b1a5ce33b2","Type":"ContainerStarted","Data":"81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa"} Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.170381 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 05:45:06 crc kubenswrapper[4652]: E1205 05:45:06.170815 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerName="cinder-api" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.170831 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerName="cinder-api" Dec 05 05:45:06 crc kubenswrapper[4652]: E1205 05:45:06.170845 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerName="cinder-api-log" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.170851 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerName="cinder-api-log" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.174046 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerName="cinder-api-log" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.174083 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f360888b-244b-4d42-80f8-6cf7e04335ab" containerName="cinder-api" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.176476 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.176611 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.179682 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.179899 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.180037 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.193002 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.049768011 podStartE2EDuration="5.192984304s" podCreationTimestamp="2025-12-05 05:45:01 +0000 UTC" firstStartedPulling="2025-12-05 05:45:02.341882436 +0000 UTC m=+1104.578612703" lastFinishedPulling="2025-12-05 05:45:03.485098729 +0000 UTC m=+1105.721828996" observedRunningTime="2025-12-05 05:45:06.177675975 +0000 UTC m=+1108.414406241" watchObservedRunningTime="2025-12-05 05:45:06.192984304 +0000 UTC m=+1108.429714570" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.239743 4652 scope.go:117] "RemoveContainer" containerID="b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc" Dec 05 05:45:06 crc kubenswrapper[4652]: E1205 05:45:06.240440 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc\": container with ID starting with b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc not found: ID does not exist" containerID="b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.240582 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc"} err="failed to get container status \"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc\": rpc error: code = NotFound desc = could not find container \"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc\": container with ID starting with b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc not found: ID does not exist" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.240677 4652 scope.go:117] "RemoveContainer" containerID="7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.240950 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9657e858-3fdb-4e7d-973d-21d1a05592b2-logs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.241207 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.243006 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-config-data\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.243069 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.243232 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9657e858-3fdb-4e7d-973d-21d1a05592b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.243285 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-scripts\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.243437 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz6px\" (UniqueName: \"kubernetes.io/projected/9657e858-3fdb-4e7d-973d-21d1a05592b2-kube-api-access-wz6px\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.243669 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.243747 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0" Dec 05 05:45:06 crc kubenswrapper[4652]: E1205 05:45:06.244703 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61\": container with ID starting with 7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61 not found: ID does not exist" containerID="7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.244743 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61"} err="failed to get container status \"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61\": rpc error: code = NotFound desc = could not find container \"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61\": container with ID starting with 7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61 not found: ID does not exist" Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 
05:45:06.244758 4652 scope.go:117] "RemoveContainer" containerID="b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.245160 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc"} err="failed to get container status \"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc\": rpc error: code = NotFound desc = could not find container \"b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc\": container with ID starting with b56ce9bf9c1f9d2a258803e7960c12f1f73b02c0ddc0e1bf72dcdd47f20ea9fc not found: ID does not exist"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.245201 4652 scope.go:117] "RemoveContainer" containerID="7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.245496 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61"} err="failed to get container status \"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61\": rpc error: code = NotFound desc = could not find container \"7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61\": container with ID starting with 7b310f5a19654b1ba7ff5dd0b9b8b7310989efd12ee1985ed63ecffd15351e61 not found: ID does not exist"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.345224 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346106 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346161 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9657e858-3fdb-4e7d-973d-21d1a05592b2-logs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346225 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346296 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-config-data\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346321 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346354 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9657e858-3fdb-4e7d-973d-21d1a05592b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346375 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-scripts\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346403 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz6px\" (UniqueName: \"kubernetes.io/projected/9657e858-3fdb-4e7d-973d-21d1a05592b2-kube-api-access-wz6px\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.346962 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9657e858-3fdb-4e7d-973d-21d1a05592b2-logs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.347010 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9657e858-3fdb-4e7d-973d-21d1a05592b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.351160 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.351758 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.351854 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.352652 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-scripts\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.353240 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.359148 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9657e858-3fdb-4e7d-973d-21d1a05592b2-config-data\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.360281 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz6px\" (UniqueName: \"kubernetes.io/projected/9657e858-3fdb-4e7d-973d-21d1a05592b2-kube-api-access-wz6px\") pod \"cinder-api-0\" (UID: \"9657e858-3fdb-4e7d-973d-21d1a05592b2\") " pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.521605 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.649756 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.697728 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.754412 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-run-httpd\") pod \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") "
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.754533 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-config-data\") pod \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") "
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.754568 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pf97b\" (UniqueName: \"kubernetes.io/projected/70b3ad88-854c-4047-adbe-a9a7c01b9f81-kube-api-access-pf97b\") pod \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") "
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.754589 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-scripts\") pod \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") "
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.754629 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-sg-core-conf-yaml\") pod \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") "
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.754769 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-log-httpd\") pod \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") "
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.754930 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-combined-ca-bundle\") pod \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\" (UID: \"70b3ad88-854c-4047-adbe-a9a7c01b9f81\") "
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.755142 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "70b3ad88-854c-4047-adbe-a9a7c01b9f81" (UID: "70b3ad88-854c-4047-adbe-a9a7c01b9f81"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.756378 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "70b3ad88-854c-4047-adbe-a9a7c01b9f81" (UID: "70b3ad88-854c-4047-adbe-a9a7c01b9f81"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.757254 4652 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.757276 4652 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70b3ad88-854c-4047-adbe-a9a7c01b9f81-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.761785 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-scripts" (OuterVolumeSpecName: "scripts") pod "70b3ad88-854c-4047-adbe-a9a7c01b9f81" (UID: "70b3ad88-854c-4047-adbe-a9a7c01b9f81"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.762455 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70b3ad88-854c-4047-adbe-a9a7c01b9f81-kube-api-access-pf97b" (OuterVolumeSpecName: "kube-api-access-pf97b") pod "70b3ad88-854c-4047-adbe-a9a7c01b9f81" (UID: "70b3ad88-854c-4047-adbe-a9a7c01b9f81"). InnerVolumeSpecName "kube-api-access-pf97b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.780331 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "70b3ad88-854c-4047-adbe-a9a7c01b9f81" (UID: "70b3ad88-854c-4047-adbe-a9a7c01b9f81"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.833256 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70b3ad88-854c-4047-adbe-a9a7c01b9f81" (UID: "70b3ad88-854c-4047-adbe-a9a7c01b9f81"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.849643 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-config-data" (OuterVolumeSpecName: "config-data") pod "70b3ad88-854c-4047-adbe-a9a7c01b9f81" (UID: "70b3ad88-854c-4047-adbe-a9a7c01b9f81"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.859919 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.859953 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pf97b\" (UniqueName: \"kubernetes.io/projected/70b3ad88-854c-4047-adbe-a9a7c01b9f81-kube-api-access-pf97b\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.859966 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.859974 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.859983 4652 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70b3ad88-854c-4047-adbe-a9a7c01b9f81-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:06 crc kubenswrapper[4652]: I1205 05:45:06.984629 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.190225 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4dccdfc3-432a-459c-a940-1d167d41405b","Type":"ContainerStarted","Data":"86e31d072f4e3486b357fe0bf2c9aadcf2e4861cdcc0f2e2ec938a7319aca2df"}
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.192110 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9657e858-3fdb-4e7d-973d-21d1a05592b2","Type":"ContainerStarted","Data":"6d4bce572a4a138475a56712effa50012f4a56d4178c311b89db19d367419f8a"}
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.195896 4652 generic.go:334] "Generic (PLEG): container finished" podID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerID="6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58" exitCode=0
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.195963 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.195974 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerDied","Data":"6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58"}
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.196020 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70b3ad88-854c-4047-adbe-a9a7c01b9f81","Type":"ContainerDied","Data":"5f2c33c0d73400c617cf78562185392350fbabd29c71ebbf2b120df6cca535ef"}
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.196042 4652 scope.go:117] "RemoveContainer" containerID="d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.200955 4652 generic.go:334] "Generic (PLEG): container finished" podID="4e04a533-fea2-4fde-a50b-5852129fa912" containerID="7c9d3fec70cacda0b535157a56f5d7fa1a3c1cedf136c200c7a49e146e79f017" exitCode=0
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.201047 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7744f5c9d6-t75mf" event={"ID":"4e04a533-fea2-4fde-a50b-5852129fa912","Type":"ContainerDied","Data":"7c9d3fec70cacda0b535157a56f5d7fa1a3c1cedf136c200c7a49e146e79f017"}
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.217687 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.217669809 podStartE2EDuration="3.217669809s" podCreationTimestamp="2025-12-05 05:45:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:07.207754446 +0000 UTC m=+1109.444484713" watchObservedRunningTime="2025-12-05 05:45:07.217669809 +0000 UTC m=+1109.454400076"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.231867 4652 scope.go:117] "RemoveContainer" containerID="dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.254916 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.262457 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.293868 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:45:07 crc kubenswrapper[4652]: E1205 05:45:07.294671 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="proxy-httpd"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.294689 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="proxy-httpd"
Dec 05 05:45:07 crc kubenswrapper[4652]: E1205 05:45:07.294714 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="ceilometer-notification-agent"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.294720 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="ceilometer-notification-agent"
Dec 05 05:45:07 crc kubenswrapper[4652]: E1205 05:45:07.294747 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="ceilometer-central-agent"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.294754 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="ceilometer-central-agent"
Dec 05 05:45:07 crc kubenswrapper[4652]: E1205 05:45:07.294793 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="sg-core"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.294798 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="sg-core"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.295289 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="proxy-httpd"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.295320 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="ceilometer-notification-agent"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.295332 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="ceilometer-central-agent"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.295352 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" containerName="sg-core"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.298934 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.305225 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.306773 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.307655 4652 scope.go:117] "RemoveContainer" containerID="6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.313266 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.340232 4652 scope.go:117] "RemoveContainer" containerID="9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.365132 4652 scope.go:117] "RemoveContainer" containerID="d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2"
Dec 05 05:45:07 crc kubenswrapper[4652]: E1205 05:45:07.365898 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2\": container with ID starting with d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2 not found: ID does not exist" containerID="d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.365942 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2"} err="failed to get container status \"d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2\": rpc error: code = NotFound desc = could not find container \"d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2\": container with ID starting with d85adbbcf45a080b1a9d70ad0b67fc44da98a815d281a45908cbb68b461886e2 not found: ID does not exist"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.365975 4652 scope.go:117] "RemoveContainer" containerID="dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf"
Dec 05 05:45:07 crc kubenswrapper[4652]: E1205 05:45:07.366360 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf\": container with ID starting with dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf not found: ID does not exist" containerID="dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.366380 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf"} err="failed to get container status \"dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf\": rpc error: code = NotFound desc = could not find container \"dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf\": container with ID starting with dbac560b4ba7b753b1d2badc5dd5e4fdb989365290df512d0f8b299c3ca3f2cf not found: ID does not exist"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.366412 4652 scope.go:117] "RemoveContainer" containerID="6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58"
Dec 05 05:45:07 crc kubenswrapper[4652]: E1205 05:45:07.367488 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58\": container with ID starting with 6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58 not found: ID does not exist" containerID="6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.367532 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58"} err="failed to get container status \"6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58\": rpc error: code = NotFound desc = could not find container \"6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58\": container with ID starting with 6f58e3f2254b1da338fa17cf07fa80fd8b77d77b31044bbf30bcf6e8778e3f58 not found: ID does not exist"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.367574 4652 scope.go:117] "RemoveContainer" containerID="9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317"
Dec 05 05:45:07 crc kubenswrapper[4652]: E1205 05:45:07.367880 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317\": container with ID starting with 9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317 not found: ID does not exist" containerID="9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.367916 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317"} err="failed to get container status \"9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317\": rpc error: code = NotFound desc = could not find container \"9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317\": container with ID starting with 9bd9f7931f9c421efa899fc09bdb3b0f07648f8d43834cc6a7a9fd2eeb6db317 not found: ID does not exist"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.495470 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-scripts\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.495828 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-run-httpd\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.495947 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.496154 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-log-httpd\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.496319 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.496426 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgqwb\" (UniqueName: \"kubernetes.io/projected/d3affc2d-d5ee-40f8-8f91-32097f3393dc-kube-api-access-rgqwb\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.496681 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-config-data\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.600249 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.600327 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgqwb\" (UniqueName: \"kubernetes.io/projected/d3affc2d-d5ee-40f8-8f91-32097f3393dc-kube-api-access-rgqwb\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.600376 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-config-data\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.600506 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-scripts\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.600541 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-run-httpd\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.600586 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.601693 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-log-httpd\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.602234 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-log-httpd\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.602640 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-run-httpd\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.604139 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.604985 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-scripts\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.605351 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.607843 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-config-data\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.614853 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgqwb\" (UniqueName: \"kubernetes.io/projected/d3affc2d-d5ee-40f8-8f91-32097f3393dc-kube-api-access-rgqwb\") pod \"ceilometer-0\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " pod="openstack/ceilometer-0"
Dec 05 05:45:07 crc kubenswrapper[4652]: I1205 05:45:07.624850 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:45:08 crc kubenswrapper[4652]: I1205 05:45:08.051749 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:45:08 crc kubenswrapper[4652]: W1205 05:45:08.076180 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3affc2d_d5ee_40f8_8f91_32097f3393dc.slice/crio-da95b7ce592f9b8f087e171bec98dce6c626af23169615162a988bba3db5c622 WatchSource:0}: Error finding container da95b7ce592f9b8f087e171bec98dce6c626af23169615162a988bba3db5c622: Status 404 returned error can't find the container with id da95b7ce592f9b8f087e171bec98dce6c626af23169615162a988bba3db5c622
Dec 05 05:45:08 crc kubenswrapper[4652]: I1205 05:45:08.144659 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70b3ad88-854c-4047-adbe-a9a7c01b9f81" path="/var/lib/kubelet/pods/70b3ad88-854c-4047-adbe-a9a7c01b9f81/volumes"
Dec 05 05:45:08 crc kubenswrapper[4652]: I1205 05:45:08.218895 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerStarted","Data":"da95b7ce592f9b8f087e171bec98dce6c626af23169615162a988bba3db5c622"}
Dec 05 05:45:08 crc kubenswrapper[4652]: I1205 05:45:08.220770 4652 generic.go:334] "Generic (PLEG): container finished" podID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerID="7d19c23409283640cb1bd3a0d34c8d831faf4762b4108458c1b8b95e16face3c" exitCode=1
Dec 05 05:45:08 crc kubenswrapper[4652]: I1205 05:45:08.220824 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerDied","Data":"7d19c23409283640cb1bd3a0d34c8d831faf4762b4108458c1b8b95e16face3c"}
Dec 05 05:45:08 crc kubenswrapper[4652]: I1205 05:45:08.220868 4652 scope.go:117] "RemoveContainer" containerID="45845c4c4d40fa1001367e8f6d74e8f4fc8cddaa1c3feae8f78472a5ef8c3442"
Dec 05 05:45:08 crc kubenswrapper[4652]: I1205 05:45:08.221429 4652 scope.go:117] "RemoveContainer" containerID="7d19c23409283640cb1bd3a0d34c8d831faf4762b4108458c1b8b95e16face3c"
Dec 05 05:45:08 crc kubenswrapper[4652]: E1205 05:45:08.221738 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(ca914a04-6a6f-4b20-af32-e0771a7dffa5)\"" pod="openstack/watcher-decision-engine-0" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5"
Dec 05 05:45:08 crc kubenswrapper[4652]: I1205 05:45:08.223508 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9657e858-3fdb-4e7d-973d-21d1a05592b2","Type":"ContainerStarted","Data":"de5bc90604ee13635fc350aabe0b82841cc7bb89a0522151ddc73a148e4c17cc"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.235987 4652 generic.go:334] "Generic (PLEG): container finished" podID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerID="0d281e2fb4275d320342055350ad8b02619345a27b14e142b0644ec0fbef89cc" exitCode=137
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.236371 4652 generic.go:334] "Generic (PLEG): container finished" podID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerID="32da43783283983c795e30d81e4ab738dbe38a370c5717921c968cf9b542a648" exitCode=137
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.236075 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84bb48cbc7-nw55m" event={"ID":"8b79d3c1-ccd2-454f-979d-67561bb7cfe5","Type":"ContainerDied","Data":"0d281e2fb4275d320342055350ad8b02619345a27b14e142b0644ec0fbef89cc"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.236529 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84bb48cbc7-nw55m" event={"ID":"8b79d3c1-ccd2-454f-979d-67561bb7cfe5","Type":"ContainerDied","Data":"32da43783283983c795e30d81e4ab738dbe38a370c5717921c968cf9b542a648"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.240340 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerStarted","Data":"0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.247247 4652 generic.go:334] "Generic (PLEG): container finished" podID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerID="81a38f6a597eb65f06ee0d1a27cf5ace01addd802c4ccca86938fd574bc77362" exitCode=137
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.247267 4652 generic.go:334] "Generic (PLEG): container finished" podID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerID="9235f17f27e40459f8bc575435ac465e6ca3d02cad1a4b541f49f4638e4e7925" exitCode=137
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.247311 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d8787bf9c-pxslp" event={"ID":"34e6630c-45d1-4105-9ce1-a22701d0231a","Type":"ContainerDied","Data":"81a38f6a597eb65f06ee0d1a27cf5ace01addd802c4ccca86938fd574bc77362"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.247335 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d8787bf9c-pxslp" event={"ID":"34e6630c-45d1-4105-9ce1-a22701d0231a","Type":"ContainerDied","Data":"9235f17f27e40459f8bc575435ac465e6ca3d02cad1a4b541f49f4638e4e7925"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.248929 4652 generic.go:334] "Generic (PLEG): container finished" podID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerID="1701b339c29bc1d513460590c57bd08dc0006707afedcd971813754b44af2018" exitCode=137
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.248949 4652 generic.go:334] "Generic (PLEG): container finished" podID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerID="2affdee24e868032ec37bbabb80da0d75aa95d821419d6c7949bd48514e8ac22" exitCode=137
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.248996 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6795c4fff7-6rkzk" event={"ID":"b190f7c2-4b39-46ac-a309-fdee7641c525","Type":"ContainerDied","Data":"1701b339c29bc1d513460590c57bd08dc0006707afedcd971813754b44af2018"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.249015 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6795c4fff7-6rkzk" event={"ID":"b190f7c2-4b39-46ac-a309-fdee7641c525","Type":"ContainerDied","Data":"2affdee24e868032ec37bbabb80da0d75aa95d821419d6c7949bd48514e8ac22"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.251707 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9657e858-3fdb-4e7d-973d-21d1a05592b2","Type":"ContainerStarted","Data":"815a7be03a17ac9063629e49effd8d9be59f58f5a9b2ccefa833b8bfa5679a15"}
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.251899 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.272388 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.272366831 podStartE2EDuration="3.272366831s" podCreationTimestamp="2025-12-05 05:45:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:09.267979606 +0000 UTC m=+1111.504709872" watchObservedRunningTime="2025-12-05 05:45:09.272366831 +0000 UTC m=+1111.509097098"
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.382181 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6795c4fff7-6rkzk"
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.428136 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84bb48cbc7-nw55m"
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.440172 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543191 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-horizon-secret-key\") pod \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543231 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-scripts\") pod \"b190f7c2-4b39-46ac-a309-fdee7641c525\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543280 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-logs\") pod \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543349 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/34e6630c-45d1-4105-9ce1-a22701d0231a-horizon-secret-key\") pod \"34e6630c-45d1-4105-9ce1-a22701d0231a\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543366 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-scripts\") pod \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543397 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxhxv\" (UniqueName: \"kubernetes.io/projected/b190f7c2-4b39-46ac-a309-fdee7641c525-kube-api-access-fxhxv\") pod \"b190f7c2-4b39-46ac-a309-fdee7641c525\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543429 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-config-data\") pod \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543459 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b190f7c2-4b39-46ac-a309-fdee7641c525-horizon-secret-key\") pod \"b190f7c2-4b39-46ac-a309-fdee7641c525\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543480 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl47g\" (UniqueName: \"kubernetes.io/projected/34e6630c-45d1-4105-9ce1-a22701d0231a-kube-api-access-rl47g\") pod \"34e6630c-45d1-4105-9ce1-a22701d0231a\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543539 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-config-data\") pod \"34e6630c-45d1-4105-9ce1-a22701d0231a\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543580 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-config-data\") pod \"b190f7c2-4b39-46ac-a309-fdee7641c525\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543606 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b190f7c2-4b39-46ac-a309-fdee7641c525-logs\") pod \"b190f7c2-4b39-46ac-a309-fdee7641c525\" (UID: \"b190f7c2-4b39-46ac-a309-fdee7641c525\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543650 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k72cs\" (UniqueName: \"kubernetes.io/projected/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-kube-api-access-k72cs\") pod \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\" (UID: \"8b79d3c1-ccd2-454f-979d-67561bb7cfe5\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543689 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34e6630c-45d1-4105-9ce1-a22701d0231a-logs\") pod \"34e6630c-45d1-4105-9ce1-a22701d0231a\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.543723 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-scripts\") pod \"34e6630c-45d1-4105-9ce1-a22701d0231a\" (UID: \"34e6630c-45d1-4105-9ce1-a22701d0231a\") "
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.544317 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-logs" (OuterVolumeSpecName: "logs") pod "8b79d3c1-ccd2-454f-979d-67561bb7cfe5" (UID: "8b79d3c1-ccd2-454f-979d-67561bb7cfe5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.544512 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b190f7c2-4b39-46ac-a309-fdee7641c525-logs" (OuterVolumeSpecName: "logs") pod "b190f7c2-4b39-46ac-a309-fdee7641c525" (UID: "b190f7c2-4b39-46ac-a309-fdee7641c525"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.544869 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34e6630c-45d1-4105-9ce1-a22701d0231a-logs" (OuterVolumeSpecName: "logs") pod "34e6630c-45d1-4105-9ce1-a22701d0231a" (UID: "34e6630c-45d1-4105-9ce1-a22701d0231a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.549123 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e6630c-45d1-4105-9ce1-a22701d0231a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "34e6630c-45d1-4105-9ce1-a22701d0231a" (UID: "34e6630c-45d1-4105-9ce1-a22701d0231a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.549699 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "8b79d3c1-ccd2-454f-979d-67561bb7cfe5" (UID: "8b79d3c1-ccd2-454f-979d-67561bb7cfe5"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.550638 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b190f7c2-4b39-46ac-a309-fdee7641c525-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b190f7c2-4b39-46ac-a309-fdee7641c525" (UID: "b190f7c2-4b39-46ac-a309-fdee7641c525"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.550791 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-kube-api-access-k72cs" (OuterVolumeSpecName: "kube-api-access-k72cs") pod "8b79d3c1-ccd2-454f-979d-67561bb7cfe5" (UID: "8b79d3c1-ccd2-454f-979d-67561bb7cfe5"). InnerVolumeSpecName "kube-api-access-k72cs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.551644 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b190f7c2-4b39-46ac-a309-fdee7641c525-kube-api-access-fxhxv" (OuterVolumeSpecName: "kube-api-access-fxhxv") pod "b190f7c2-4b39-46ac-a309-fdee7641c525" (UID: "b190f7c2-4b39-46ac-a309-fdee7641c525"). InnerVolumeSpecName "kube-api-access-fxhxv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.552396 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34e6630c-45d1-4105-9ce1-a22701d0231a-kube-api-access-rl47g" (OuterVolumeSpecName: "kube-api-access-rl47g") pod "34e6630c-45d1-4105-9ce1-a22701d0231a" (UID: "34e6630c-45d1-4105-9ce1-a22701d0231a"). InnerVolumeSpecName "kube-api-access-rl47g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.570415 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-config-data" (OuterVolumeSpecName: "config-data") pod "34e6630c-45d1-4105-9ce1-a22701d0231a" (UID: "34e6630c-45d1-4105-9ce1-a22701d0231a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.576471 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-scripts" (OuterVolumeSpecName: "scripts") pod "b190f7c2-4b39-46ac-a309-fdee7641c525" (UID: "b190f7c2-4b39-46ac-a309-fdee7641c525"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.577745 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-scripts" (OuterVolumeSpecName: "scripts") pod "8b79d3c1-ccd2-454f-979d-67561bb7cfe5" (UID: "8b79d3c1-ccd2-454f-979d-67561bb7cfe5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.579092 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-scripts" (OuterVolumeSpecName: "scripts") pod "34e6630c-45d1-4105-9ce1-a22701d0231a" (UID: "34e6630c-45d1-4105-9ce1-a22701d0231a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.581234 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-config-data" (OuterVolumeSpecName: "config-data") pod "b190f7c2-4b39-46ac-a309-fdee7641c525" (UID: "b190f7c2-4b39-46ac-a309-fdee7641c525"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.582803 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-config-data" (OuterVolumeSpecName: "config-data") pod "8b79d3c1-ccd2-454f-979d-67561bb7cfe5" (UID: "8b79d3c1-ccd2-454f-979d-67561bb7cfe5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646480 4652 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646888 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646900 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-logs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646910 4652 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/34e6630c-45d1-4105-9ce1-a22701d0231a-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646919 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646928 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxhxv\" (UniqueName: \"kubernetes.io/projected/b190f7c2-4b39-46ac-a309-fdee7641c525-kube-api-access-fxhxv\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646940 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646949 4652 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b190f7c2-4b39-46ac-a309-fdee7641c525-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646957 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl47g\" (UniqueName: \"kubernetes.io/projected/34e6630c-45d1-4105-9ce1-a22701d0231a-kube-api-access-rl47g\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646965 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646973 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b190f7c2-4b39-46ac-a309-fdee7641c525-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646983 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b190f7c2-4b39-46ac-a309-fdee7641c525-logs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646991 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k72cs\" (UniqueName: \"kubernetes.io/projected/8b79d3c1-ccd2-454f-979d-67561bb7cfe5-kube-api-access-k72cs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.646999 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34e6630c-45d1-4105-9ce1-a22701d0231a-logs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:09 crc kubenswrapper[4652]: I1205 05:45:09.647006 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/34e6630c-45d1-4105-9ce1-a22701d0231a-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.020362 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7744f5c9d6-t75mf" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.159:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.159:8443: connect: connection refused"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.262062 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6795c4fff7-6rkzk" event={"ID":"b190f7c2-4b39-46ac-a309-fdee7641c525","Type":"ContainerDied","Data":"9ebbe2e5790107d8fbfc155ac6f7d341f8db57331aaa7786056df9a2f0eece1f"}
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.262117 4652 scope.go:117] "RemoveContainer" containerID="1701b339c29bc1d513460590c57bd08dc0006707afedcd971813754b44af2018"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.262112 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6795c4fff7-6rkzk"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.264689 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84bb48cbc7-nw55m" event={"ID":"8b79d3c1-ccd2-454f-979d-67561bb7cfe5","Type":"ContainerDied","Data":"8966da6b8c4e3f2a3f3a94adf1cf2fe67dbe90581822d241ab9f3c0a8a4b0c09"}
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.265037 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84bb48cbc7-nw55m"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.267682 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerStarted","Data":"7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91"}
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.273256 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d8787bf9c-pxslp" event={"ID":"34e6630c-45d1-4105-9ce1-a22701d0231a","Type":"ContainerDied","Data":"63bacf8f6fa09400a375b5278a1f986b99648319eccea16db905f2801ab5639f"}
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.273407 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d8787bf9c-pxslp"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.300342 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6795c4fff7-6rkzk"]
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.312599 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6795c4fff7-6rkzk"]
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.319613 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84bb48cbc7-nw55m"]
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.325038 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-84bb48cbc7-nw55m"]
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.331413 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7d8787bf9c-pxslp"]
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.338674 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7d8787bf9c-pxslp"]
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.418363 4652 scope.go:117] "RemoveContainer" containerID="2affdee24e868032ec37bbabb80da0d75aa95d821419d6c7949bd48514e8ac22"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.445474 4652 scope.go:117] "RemoveContainer" containerID="0d281e2fb4275d320342055350ad8b02619345a27b14e142b0644ec0fbef89cc"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.610328 4652 scope.go:117] "RemoveContainer" containerID="32da43783283983c795e30d81e4ab738dbe38a370c5717921c968cf9b542a648"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.632933 4652 scope.go:117] "RemoveContainer" containerID="81a38f6a597eb65f06ee0d1a27cf5ace01addd802c4ccca86938fd574bc77362"
Dec 05 05:45:10 crc kubenswrapper[4652]: I1205 05:45:10.788628 4652 scope.go:117] "RemoveContainer" containerID="9235f17f27e40459f8bc575435ac465e6ca3d02cad1a4b541f49f4638e4e7925"
Dec 05 05:45:11 crc kubenswrapper[4652]: I1205 05:45:11.286941 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerStarted","Data":"4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab"}
Dec 05 05:45:11 crc kubenswrapper[4652]: I1205 05:45:11.776180 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Dec 05 05:45:11 crc kubenswrapper[4652]: I1205 05:45:11.822801 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 05:45:11 crc kubenswrapper[4652]: I1205 05:45:11.847699 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75958fc765-m6xxk"
Dec 05 05:45:11 crc kubenswrapper[4652]: I1205 05:45:11.904063 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578598f949-nfsj7"]
Dec 05 05:45:11 crc kubenswrapper[4652]: I1205 05:45:11.904250 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-578598f949-nfsj7" podUID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" containerName="dnsmasq-dns" containerID="cri-o://cf3caa178abc46f36d7fd9cd916042ff15942ac8fdb39995b977cb63bcbcd100" gracePeriod=10
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.157775 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" path="/var/lib/kubelet/pods/34e6630c-45d1-4105-9ce1-a22701d0231a/volumes"
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.159092 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" path="/var/lib/kubelet/pods/8b79d3c1-ccd2-454f-979d-67561bb7cfe5/volumes"
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.160114 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" path="/var/lib/kubelet/pods/b190f7c2-4b39-46ac-a309-fdee7641c525/volumes"
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.309222 4652 generic.go:334] "Generic (PLEG): container finished" podID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" containerID="cf3caa178abc46f36d7fd9cd916042ff15942ac8fdb39995b977cb63bcbcd100" exitCode=0
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.309426 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerName="cinder-scheduler" containerID="cri-o://34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f" gracePeriod=30
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.309717 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578598f949-nfsj7" event={"ID":"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5","Type":"ContainerDied","Data":"cf3caa178abc46f36d7fd9cd916042ff15942ac8fdb39995b977cb63bcbcd100"}
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.309995 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerName="probe" containerID="cri-o://81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa" gracePeriod=30
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.371414 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578598f949-nfsj7"
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.533907 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-nb\") pod \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") "
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.534142 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-svc\") pod \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") "
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.534285 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmls8\" (UniqueName: \"kubernetes.io/projected/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-kube-api-access-cmls8\") pod \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") "
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.534373 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-swift-storage-0\") pod \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") "
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.534474 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-sb\") pod \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") "
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.534526 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-config\") pod \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\" (UID: \"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5\") "
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.538683 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-kube-api-access-cmls8" (OuterVolumeSpecName: "kube-api-access-cmls8") pod "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" (UID: "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5"). InnerVolumeSpecName "kube-api-access-cmls8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.589665 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" (UID: "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.595541 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" (UID: "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5"). InnerVolumeSpecName "ovsdbserver-nb".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.597805 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" (UID: "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.598439 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" (UID: "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.602635 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-config" (OuterVolumeSpecName: "config") pod "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" (UID: "ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.637656 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.637696 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.637710 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmls8\" (UniqueName: \"kubernetes.io/projected/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-kube-api-access-cmls8\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.637722 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.637731 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:12 crc kubenswrapper[4652]: I1205 05:45:12.637740 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.320845 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578598f949-nfsj7" event={"ID":"ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5","Type":"ContainerDied","Data":"811f14f23cd0a760d8369cb560ba21f4b294b36f9d51dc0a8266600dedb9fb01"} Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.321145 4652 scope.go:117] "RemoveContainer" containerID="cf3caa178abc46f36d7fd9cd916042ff15942ac8fdb39995b977cb63bcbcd100" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.321769 4652 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578598f949-nfsj7" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.324048 4652 generic.go:334] "Generic (PLEG): container finished" podID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerID="81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa" exitCode=0 Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.324105 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c2352b28-244f-49c3-92fe-02b1a5ce33b2","Type":"ContainerDied","Data":"81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa"} Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.330528 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerStarted","Data":"a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5"} Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.330800 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.338580 4652 scope.go:117] "RemoveContainer" containerID="8b303bdf22b72383711531f134b7fcd5201475737fe6c3b49fadc32dc9906e1b" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.370324 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.002282692 podStartE2EDuration="6.370310294s" podCreationTimestamp="2025-12-05 05:45:07 +0000 UTC" firstStartedPulling="2025-12-05 05:45:08.082198443 +0000 UTC m=+1110.318928710" lastFinishedPulling="2025-12-05 05:45:12.450226044 +0000 UTC m=+1114.686956312" observedRunningTime="2025-12-05 05:45:13.35814404 +0000 UTC m=+1115.594874308" watchObservedRunningTime="2025-12-05 05:45:13.370310294 +0000 UTC m=+1115.607040561" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.373693 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578598f949-nfsj7"] Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.379381 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-578598f949-nfsj7"] Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.465359 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.465664 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.496858 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 05:45:13 crc kubenswrapper[4652]: I1205 05:45:13.502828 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.136006 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" path="/var/lib/kubelet/pods/ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5/volumes" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.342189 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.342245 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-external-api-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.449245 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.449285 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.474612 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.482283 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.662872 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.666276 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-75cb4c4d5b-gg8r8" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.714376 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.714419 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 05:45:14 crc kubenswrapper[4652]: I1205 05:45:14.715030 4652 scope.go:117] "RemoveContainer" containerID="7d19c23409283640cb1bd3a0d34c8d831faf4762b4108458c1b8b95e16face3c" Dec 05 05:45:14 crc kubenswrapper[4652]: E1205 05:45:14.715285 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(ca914a04-6a6f-4b20-af32-e0771a7dffa5)\"" pod="openstack/watcher-decision-engine-0" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" Dec 05 05:45:15 crc kubenswrapper[4652]: I1205 05:45:15.350539 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:15 crc kubenswrapper[4652]: I1205 05:45:15.350958 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:15 crc kubenswrapper[4652]: I1205 05:45:15.960197 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.049334 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.052767 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.113612 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-scripts\") pod \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.113794 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c2352b28-244f-49c3-92fe-02b1a5ce33b2-etc-machine-id\") pod \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.113845 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data-custom\") pod \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.113880 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data\") pod \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.113906 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2wf7\" (UniqueName: \"kubernetes.io/projected/c2352b28-244f-49c3-92fe-02b1a5ce33b2-kube-api-access-w2wf7\") pod \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.113917 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c2352b28-244f-49c3-92fe-02b1a5ce33b2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c2352b28-244f-49c3-92fe-02b1a5ce33b2" (UID: "c2352b28-244f-49c3-92fe-02b1a5ce33b2"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.113946 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-combined-ca-bundle\") pod \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\" (UID: \"c2352b28-244f-49c3-92fe-02b1a5ce33b2\") " Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.114577 4652 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c2352b28-244f-49c3-92fe-02b1a5ce33b2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.123647 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-scripts" (OuterVolumeSpecName: "scripts") pod "c2352b28-244f-49c3-92fe-02b1a5ce33b2" (UID: "c2352b28-244f-49c3-92fe-02b1a5ce33b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.134181 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2352b28-244f-49c3-92fe-02b1a5ce33b2-kube-api-access-w2wf7" (OuterVolumeSpecName: "kube-api-access-w2wf7") pod "c2352b28-244f-49c3-92fe-02b1a5ce33b2" (UID: "c2352b28-244f-49c3-92fe-02b1a5ce33b2"). InnerVolumeSpecName "kube-api-access-w2wf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.134632 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c2352b28-244f-49c3-92fe-02b1a5ce33b2" (UID: "c2352b28-244f-49c3-92fe-02b1a5ce33b2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.180129 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2352b28-244f-49c3-92fe-02b1a5ce33b2" (UID: "c2352b28-244f-49c3-92fe-02b1a5ce33b2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.220984 4652 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.221011 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2wf7\" (UniqueName: \"kubernetes.io/projected/c2352b28-244f-49c3-92fe-02b1a5ce33b2-kube-api-access-w2wf7\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.221023 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.221032 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.225851 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data" (OuterVolumeSpecName: "config-data") pod "c2352b28-244f-49c3-92fe-02b1a5ce33b2" (UID: "c2352b28-244f-49c3-92fe-02b1a5ce33b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.322981 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2352b28-244f-49c3-92fe-02b1a5ce33b2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.366861 4652 generic.go:334] "Generic (PLEG): container finished" podID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerID="34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f" exitCode=0 Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.367619 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.369401 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c2352b28-244f-49c3-92fe-02b1a5ce33b2","Type":"ContainerDied","Data":"34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f"} Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.369437 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c2352b28-244f-49c3-92fe-02b1a5ce33b2","Type":"ContainerDied","Data":"cc38f94566964b290b0eda608289b5852eef6162e4fa339b9dd16e96e9d9edd2"} Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.369455 4652 scope.go:117] "RemoveContainer" containerID="81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.398693 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.405479 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.416049 4652 scope.go:117] "RemoveContainer" containerID="34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.429329 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430081 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" containerName="dnsmasq-dns" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430107 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" containerName="dnsmasq-dns" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430122 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerName="cinder-scheduler" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430129 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerName="cinder-scheduler" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430149 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430155 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430165 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430170 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430184 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430189 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430197 4652 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430202 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430213 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430220 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430230 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerName="probe" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430236 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerName="probe" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430265 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430270 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.430286 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" containerName="init" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430291 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" containerName="init" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430568 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430587 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430597 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b190f7c2-4b39-46ac-a309-fdee7641c525" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430609 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerName="cinder-scheduler" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430622 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="34e6630c-45d1-4105-9ce1-a22701d0231a" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430630 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab9a77c7-a8c7-48bb-8d69-0eeaea574ac5" containerName="dnsmasq-dns" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430635 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerName="horizon" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430654 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b79d3c1-ccd2-454f-979d-67561bb7cfe5" containerName="horizon-log" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.430675 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" containerName="probe" 
Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.436141 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.436288 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.446370 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.454926 4652 scope.go:117] "RemoveContainer" containerID="81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.459867 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa\": container with ID starting with 81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa not found: ID does not exist" containerID="81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.459937 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa"} err="failed to get container status \"81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa\": rpc error: code = NotFound desc = could not find container \"81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa\": container with ID starting with 81fb48a58636f81388f6125c86aceb01b21971c2e8e43edc1ac8b4ecc0a3feaa not found: ID does not exist" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.459963 4652 scope.go:117] "RemoveContainer" containerID="34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f" Dec 05 05:45:16 crc kubenswrapper[4652]: E1205 05:45:16.460461 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f\": container with ID starting with 34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f not found: ID does not exist" containerID="34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.460486 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f"} err="failed to get container status \"34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f\": rpc error: code = NotFound desc = could not find container \"34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f\": container with ID starting with 34c24d2d2155f341651a18a9bee4cb84fb7aec45306e69c550c88073ef7c8f4f not found: ID does not exist" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.527935 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.528110 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.528134 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.528165 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb5qs\" (UniqueName: \"kubernetes.io/projected/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-kube-api-access-fb5qs\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.528213 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.528232 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.630580 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.630638 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.630675 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb5qs\" (UniqueName: \"kubernetes.io/projected/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-kube-api-access-fb5qs\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.630798 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.630833 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " 
pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.630919 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.630936 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.634325 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.634764 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.635380 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.636000 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.645025 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb5qs\" (UniqueName: \"kubernetes.io/projected/b7bdd3b9-536b-4099-9f8c-753eb2725f8b-kube-api-access-fb5qs\") pod \"cinder-scheduler-0\" (UID: \"b7bdd3b9-536b-4099-9f8c-753eb2725f8b\") " pod="openstack/cinder-scheduler-0" Dec 05 05:45:16 crc kubenswrapper[4652]: I1205 05:45:16.763866 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 05:45:17 crc kubenswrapper[4652]: I1205 05:45:17.140114 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-c7bb6b57-bpms4" Dec 05 05:45:17 crc kubenswrapper[4652]: I1205 05:45:17.311317 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 05:45:17 crc kubenswrapper[4652]: I1205 05:45:17.379669 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b7bdd3b9-536b-4099-9f8c-753eb2725f8b","Type":"ContainerStarted","Data":"a9b078b71f5271a6df80c511e548a55f7337308d5083bcc25c0eb15f5c180ac2"} Dec 05 05:45:17 crc kubenswrapper[4652]: I1205 05:45:17.382312 4652 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 05:45:17 crc kubenswrapper[4652]: I1205 05:45:17.382473 4652 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 05:45:17 crc kubenswrapper[4652]: I1205 05:45:17.452601 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:17 crc kubenswrapper[4652]: I1205 05:45:17.457100 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:18 crc kubenswrapper[4652]: I1205 05:45:18.143355 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2352b28-244f-49c3-92fe-02b1a5ce33b2" path="/var/lib/kubelet/pods/c2352b28-244f-49c3-92fe-02b1a5ce33b2/volumes" Dec 05 05:45:18 crc kubenswrapper[4652]: I1205 05:45:18.394849 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b7bdd3b9-536b-4099-9f8c-753eb2725f8b","Type":"ContainerStarted","Data":"5626d2a190f7641a4457b4f5731e0cf2b60c8a438a92fad8238d8fa04c463093"} Dec 05 05:45:18 crc kubenswrapper[4652]: I1205 05:45:18.570161 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 05:45:19 crc kubenswrapper[4652]: I1205 05:45:19.411222 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b7bdd3b9-536b-4099-9f8c-753eb2725f8b","Type":"ContainerStarted","Data":"07e88d4e2e54b7451535f201cb5d52087faeaa56595dc0b033875bd352d8237d"} Dec 05 05:45:19 crc kubenswrapper[4652]: I1205 05:45:19.438903 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.4388587250000002 podStartE2EDuration="3.438858725s" podCreationTimestamp="2025-12-05 05:45:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:19.434599411 +0000 UTC m=+1121.671329678" watchObservedRunningTime="2025-12-05 05:45:19.438858725 +0000 UTC m=+1121.675588992" Dec 05 05:45:20 crc kubenswrapper[4652]: I1205 05:45:20.020538 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7744f5c9d6-t75mf" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.159:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.159:8443: connect: connection refused" Dec 05 05:45:21 crc kubenswrapper[4652]: I1205 05:45:21.509436 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:21 crc kubenswrapper[4652]: I1205 05:45:21.509852 4652 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="proxy-httpd" containerID="cri-o://a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5" gracePeriod=30 Dec 05 05:45:21 crc kubenswrapper[4652]: I1205 05:45:21.509870 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="sg-core" containerID="cri-o://4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab" gracePeriod=30 Dec 05 05:45:21 crc kubenswrapper[4652]: I1205 05:45:21.509914 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="ceilometer-notification-agent" containerID="cri-o://7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91" gracePeriod=30 Dec 05 05:45:21 crc kubenswrapper[4652]: I1205 05:45:21.509819 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="ceilometer-central-agent" containerID="cri-o://0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6" gracePeriod=30 Dec 05 05:45:21 crc kubenswrapper[4652]: I1205 05:45:21.764141 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.012981 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-65dffd4ccf-cqtxw"] Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.014377 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.016475 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.017156 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.017291 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.023606 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-65dffd4ccf-cqtxw"] Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.079367 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-public-tls-certs\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.079402 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2txgv\" (UniqueName: \"kubernetes.io/projected/92728b29-e7ee-4aa6-b072-10c3abc0e22a-kube-api-access-2txgv\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.079466 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92728b29-e7ee-4aa6-b072-10c3abc0e22a-log-httpd\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.079687 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-internal-tls-certs\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.079814 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-config-data\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.079978 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92728b29-e7ee-4aa6-b072-10c3abc0e22a-run-httpd\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.080099 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/92728b29-e7ee-4aa6-b072-10c3abc0e22a-etc-swift\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " 
pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.080130 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-combined-ca-bundle\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.162610 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.163860 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.167851 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.167889 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.168147 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-k8g2r" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.170760 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.181585 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92728b29-e7ee-4aa6-b072-10c3abc0e22a-run-httpd\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.181645 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/92728b29-e7ee-4aa6-b072-10c3abc0e22a-etc-swift\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.181661 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-combined-ca-bundle\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.181684 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-public-tls-certs\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.181705 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2txgv\" (UniqueName: \"kubernetes.io/projected/92728b29-e7ee-4aa6-b072-10c3abc0e22a-kube-api-access-2txgv\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.181748 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/92728b29-e7ee-4aa6-b072-10c3abc0e22a-log-httpd\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.181798 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-internal-tls-certs\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.181844 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-config-data\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.186921 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-config-data\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.187179 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92728b29-e7ee-4aa6-b072-10c3abc0e22a-run-httpd\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.189989 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/92728b29-e7ee-4aa6-b072-10c3abc0e22a-etc-swift\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.190624 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92728b29-e7ee-4aa6-b072-10c3abc0e22a-log-httpd\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.191390 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-combined-ca-bundle\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.193793 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-public-tls-certs\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.194456 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92728b29-e7ee-4aa6-b072-10c3abc0e22a-internal-tls-certs\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: 
\"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.203531 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2txgv\" (UniqueName: \"kubernetes.io/projected/92728b29-e7ee-4aa6-b072-10c3abc0e22a-kube-api-access-2txgv\") pod \"swift-proxy-65dffd4ccf-cqtxw\" (UID: \"92728b29-e7ee-4aa6-b072-10c3abc0e22a\") " pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.283942 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-openstack-config\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.284150 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-openstack-config-secret\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.284181 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.284207 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78tjf\" (UniqueName: \"kubernetes.io/projected/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-kube-api-access-78tjf\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.328444 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.386250 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-openstack-config-secret\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.386494 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.386534 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78tjf\" (UniqueName: \"kubernetes.io/projected/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-kube-api-access-78tjf\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.386733 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-openstack-config\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.387463 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-openstack-config\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.390394 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-openstack-config-secret\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.394318 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.405864 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78tjf\" (UniqueName: \"kubernetes.io/projected/7ee40e5d-1fe1-4d60-ac89-85beb2755efa-kube-api-access-78tjf\") pod \"openstackclient\" (UID: \"7ee40e5d-1fe1-4d60-ac89-85beb2755efa\") " pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.436905 4652 generic.go:334] "Generic (PLEG): container finished" podID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerID="a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5" exitCode=0 Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.436931 4652 generic.go:334] "Generic (PLEG): container finished" podID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerID="4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab" exitCode=2 Dec 05 05:45:22 crc 
kubenswrapper[4652]: I1205 05:45:22.436938 4652 generic.go:334] "Generic (PLEG): container finished" podID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerID="0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6" exitCode=0 Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.436959 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerDied","Data":"a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5"} Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.436983 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerDied","Data":"4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab"} Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.436994 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerDied","Data":"0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6"} Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.478048 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.786177 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-65dffd4ccf-cqtxw"] Dec 05 05:45:22 crc kubenswrapper[4652]: I1205 05:45:22.908850 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.355719 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.446026 4652 generic.go:334] "Generic (PLEG): container finished" podID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerID="7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91" exitCode=0 Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.446081 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.446089 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerDied","Data":"7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91"} Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.446123 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3affc2d-d5ee-40f8-8f91-32097f3393dc","Type":"ContainerDied","Data":"da95b7ce592f9b8f087e171bec98dce6c626af23169615162a988bba3db5c622"} Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.446142 4652 scope.go:117] "RemoveContainer" containerID="a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.447346 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7ee40e5d-1fe1-4d60-ac89-85beb2755efa","Type":"ContainerStarted","Data":"c5275c37a3765efc884ce6baa74fe1a820e3130807af8bbcb3dd76f42c395f07"} Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.449342 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" event={"ID":"92728b29-e7ee-4aa6-b072-10c3abc0e22a","Type":"ContainerStarted","Data":"aa5462650b535ac4d1670642a868279f615d2f5631e955340f7ff59f7951ed01"} Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.449369 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" event={"ID":"92728b29-e7ee-4aa6-b072-10c3abc0e22a","Type":"ContainerStarted","Data":"dec7d1752a3fbd913fc5c7da274b4aa027f0c69c09db4b708060dba47351b516"} Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.449379 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" event={"ID":"92728b29-e7ee-4aa6-b072-10c3abc0e22a","Type":"ContainerStarted","Data":"a8d2ce85f4615fab57d7e71fbb0f411490b9d224092e4668ec2489ca411a53e9"} Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.450519 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.453930 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.462183 4652 scope.go:117] "RemoveContainer" containerID="4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.485522 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" podStartSLOduration=2.485487322 podStartE2EDuration="2.485487322s" podCreationTimestamp="2025-12-05 05:45:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:23.475433588 +0000 UTC m=+1125.712163856" watchObservedRunningTime="2025-12-05 05:45:23.485487322 +0000 UTC m=+1125.722217589" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.493433 4652 scope.go:117] "RemoveContainer" containerID="7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.509993 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-config-data\") pod \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.510166 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-sg-core-conf-yaml\") pod \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.510489 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgqwb\" (UniqueName: \"kubernetes.io/projected/d3affc2d-d5ee-40f8-8f91-32097f3393dc-kube-api-access-rgqwb\") pod \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.510529 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-scripts\") pod \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.510710 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-log-httpd\") pod \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.510738 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-combined-ca-bundle\") pod \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.510811 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-run-httpd\") pod \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\" (UID: \"d3affc2d-d5ee-40f8-8f91-32097f3393dc\") " Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.511588 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d3affc2d-d5ee-40f8-8f91-32097f3393dc" (UID: "d3affc2d-d5ee-40f8-8f91-32097f3393dc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.511615 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d3affc2d-d5ee-40f8-8f91-32097f3393dc" (UID: "d3affc2d-d5ee-40f8-8f91-32097f3393dc"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.513174 4652 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.513193 4652 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3affc2d-d5ee-40f8-8f91-32097f3393dc-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.514875 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3affc2d-d5ee-40f8-8f91-32097f3393dc-kube-api-access-rgqwb" (OuterVolumeSpecName: "kube-api-access-rgqwb") pod "d3affc2d-d5ee-40f8-8f91-32097f3393dc" (UID: "d3affc2d-d5ee-40f8-8f91-32097f3393dc"). InnerVolumeSpecName "kube-api-access-rgqwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.515724 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-scripts" (OuterVolumeSpecName: "scripts") pod "d3affc2d-d5ee-40f8-8f91-32097f3393dc" (UID: "d3affc2d-d5ee-40f8-8f91-32097f3393dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.516515 4652 scope.go:117] "RemoveContainer" containerID="0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.533889 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d3affc2d-d5ee-40f8-8f91-32097f3393dc" (UID: "d3affc2d-d5ee-40f8-8f91-32097f3393dc"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.535706 4652 scope.go:117] "RemoveContainer" containerID="a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5" Dec 05 05:45:23 crc kubenswrapper[4652]: E1205 05:45:23.536032 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5\": container with ID starting with a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5 not found: ID does not exist" containerID="a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.536067 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5"} err="failed to get container status \"a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5\": rpc error: code = NotFound desc = could not find container \"a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5\": container with ID starting with a7de56e62f3ca35dc961813a9d3786a7d8eec2787e4ee238c329eafea27738d5 not found: ID does not exist" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.536090 4652 scope.go:117] "RemoveContainer" containerID="4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab" Dec 05 05:45:23 crc kubenswrapper[4652]: E1205 05:45:23.536344 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab\": container with ID starting with 4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab not found: ID does not exist" containerID="4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.536372 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab"} err="failed to get container status \"4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab\": rpc error: code = NotFound desc = could not find container \"4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab\": container with ID starting with 4eac55b3aef9883a7d15c33971b214eb027b50c6505a685881b9a330c16fa2ab not found: ID does not exist" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.536391 4652 scope.go:117] "RemoveContainer" containerID="7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91" Dec 05 05:45:23 crc kubenswrapper[4652]: E1205 05:45:23.536697 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91\": container with ID starting with 7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91 not found: ID does not exist" containerID="7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.536719 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91"} err="failed to get container status \"7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91\": rpc error: code = NotFound desc = could not 
find container \"7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91\": container with ID starting with 7af1c6d2709536d3580b9e2fd325e6edb44cb0bfa94a6f92cd8ecec630861b91 not found: ID does not exist" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.536733 4652 scope.go:117] "RemoveContainer" containerID="0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6" Dec 05 05:45:23 crc kubenswrapper[4652]: E1205 05:45:23.536960 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6\": container with ID starting with 0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6 not found: ID does not exist" containerID="0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.536984 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6"} err="failed to get container status \"0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6\": rpc error: code = NotFound desc = could not find container \"0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6\": container with ID starting with 0e9bea6aa69dcb859663089c03a9b52e33cfeed049c7732e3a30a893462b74f6 not found: ID does not exist" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.568956 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3affc2d-d5ee-40f8-8f91-32097f3393dc" (UID: "d3affc2d-d5ee-40f8-8f91-32097f3393dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.582168 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-config-data" (OuterVolumeSpecName: "config-data") pod "d3affc2d-d5ee-40f8-8f91-32097f3393dc" (UID: "d3affc2d-d5ee-40f8-8f91-32097f3393dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.615621 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.615662 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.615672 4652 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.615680 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgqwb\" (UniqueName: \"kubernetes.io/projected/d3affc2d-d5ee-40f8-8f91-32097f3393dc-kube-api-access-rgqwb\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.615690 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3affc2d-d5ee-40f8-8f91-32097f3393dc-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.773391 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.780312 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793070 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:23 crc kubenswrapper[4652]: E1205 05:45:23.793581 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="proxy-httpd" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793596 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="proxy-httpd" Dec 05 05:45:23 crc kubenswrapper[4652]: E1205 05:45:23.793636 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="sg-core" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793642 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="sg-core" Dec 05 05:45:23 crc kubenswrapper[4652]: E1205 05:45:23.793650 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="ceilometer-notification-agent" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793657 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="ceilometer-notification-agent" Dec 05 05:45:23 crc kubenswrapper[4652]: E1205 05:45:23.793667 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="ceilometer-central-agent" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793672 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="ceilometer-central-agent" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793851 4652 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="proxy-httpd" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793870 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="ceilometer-notification-agent" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793895 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="sg-core" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.793908 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" containerName="ceilometer-central-agent" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.795539 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.797242 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.797669 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.802476 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.920629 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-scripts\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.920724 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-log-httpd\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.920836 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-config-data\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.920895 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-run-httpd\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.920976 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.921004 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " 
pod="openstack/ceilometer-0" Dec 05 05:45:23 crc kubenswrapper[4652]: I1205 05:45:23.921021 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg79b\" (UniqueName: \"kubernetes.io/projected/2116de3c-a80b-442e-8de5-a4a8381e345e-kube-api-access-lg79b\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.022730 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-config-data\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.022827 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-run-httpd\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.022919 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.022948 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.022968 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg79b\" (UniqueName: \"kubernetes.io/projected/2116de3c-a80b-442e-8de5-a4a8381e345e-kube-api-access-lg79b\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.023048 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-scripts\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.023135 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-log-httpd\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.023332 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-run-httpd\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.024163 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-log-httpd\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " 
pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.030332 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.030537 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.033633 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-config-data\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.034121 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-scripts\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.058693 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg79b\" (UniqueName: \"kubernetes.io/projected/2116de3c-a80b-442e-8de5-a4a8381e345e-kube-api-access-lg79b\") pod \"ceilometer-0\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") " pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.067639 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-rjhbt"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.068840 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-rjhbt" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.076775 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-rjhbt"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.112057 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.125671 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ngxl\" (UniqueName: \"kubernetes.io/projected/6eb04f8f-6137-4976-b83c-61694b5e34bb-kube-api-access-7ngxl\") pod \"nova-api-db-create-rjhbt\" (UID: \"6eb04f8f-6137-4976-b83c-61694b5e34bb\") " pod="openstack/nova-api-db-create-rjhbt" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.125717 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb04f8f-6137-4976-b83c-61694b5e34bb-operator-scripts\") pod \"nova-api-db-create-rjhbt\" (UID: \"6eb04f8f-6137-4976-b83c-61694b5e34bb\") " pod="openstack/nova-api-db-create-rjhbt" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.145217 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3affc2d-d5ee-40f8-8f91-32097f3393dc" path="/var/lib/kubelet/pods/d3affc2d-d5ee-40f8-8f91-32097f3393dc/volumes" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.164181 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-tj9c4"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.168615 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tj9c4" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.186593 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-tj9c4"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.226664 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ngxl\" (UniqueName: \"kubernetes.io/projected/6eb04f8f-6137-4976-b83c-61694b5e34bb-kube-api-access-7ngxl\") pod \"nova-api-db-create-rjhbt\" (UID: \"6eb04f8f-6137-4976-b83c-61694b5e34bb\") " pod="openstack/nova-api-db-create-rjhbt" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.226699 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb04f8f-6137-4976-b83c-61694b5e34bb-operator-scripts\") pod \"nova-api-db-create-rjhbt\" (UID: \"6eb04f8f-6137-4976-b83c-61694b5e34bb\") " pod="openstack/nova-api-db-create-rjhbt" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.226907 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sgtz\" (UniqueName: \"kubernetes.io/projected/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-kube-api-access-4sgtz\") pod \"nova-cell0-db-create-tj9c4\" (UID: \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\") " pod="openstack/nova-cell0-db-create-tj9c4" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.226927 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-operator-scripts\") pod \"nova-cell0-db-create-tj9c4\" (UID: \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\") " pod="openstack/nova-cell0-db-create-tj9c4" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.229549 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb04f8f-6137-4976-b83c-61694b5e34bb-operator-scripts\") pod \"nova-api-db-create-rjhbt\" (UID: 
\"6eb04f8f-6137-4976-b83c-61694b5e34bb\") " pod="openstack/nova-api-db-create-rjhbt" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.247657 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ngxl\" (UniqueName: \"kubernetes.io/projected/6eb04f8f-6137-4976-b83c-61694b5e34bb-kube-api-access-7ngxl\") pod \"nova-api-db-create-rjhbt\" (UID: \"6eb04f8f-6137-4976-b83c-61694b5e34bb\") " pod="openstack/nova-api-db-create-rjhbt" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.275212 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-7ac3-account-create-update-mp544"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.276447 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-7ac3-account-create-update-mp544" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.279321 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.287597 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-7ac3-account-create-update-mp544"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.329860 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sgtz\" (UniqueName: \"kubernetes.io/projected/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-kube-api-access-4sgtz\") pod \"nova-cell0-db-create-tj9c4\" (UID: \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\") " pod="openstack/nova-cell0-db-create-tj9c4" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.329910 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-operator-scripts\") pod \"nova-cell0-db-create-tj9c4\" (UID: \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\") " pod="openstack/nova-cell0-db-create-tj9c4" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.331057 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-operator-scripts\") pod \"nova-cell0-db-create-tj9c4\" (UID: \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\") " pod="openstack/nova-cell0-db-create-tj9c4" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.347678 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sgtz\" (UniqueName: \"kubernetes.io/projected/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-kube-api-access-4sgtz\") pod \"nova-cell0-db-create-tj9c4\" (UID: \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\") " pod="openstack/nova-cell0-db-create-tj9c4" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.375546 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-hg5sm"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.377148 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-hg5sm" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.391989 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-hg5sm"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.433618 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzz9j\" (UniqueName: \"kubernetes.io/projected/99f60875-ac12-428f-9992-29473a3dfb62-kube-api-access-tzz9j\") pod \"nova-cell1-db-create-hg5sm\" (UID: \"99f60875-ac12-428f-9992-29473a3dfb62\") " pod="openstack/nova-cell1-db-create-hg5sm" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.433663 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zgzl\" (UniqueName: \"kubernetes.io/projected/d187d4b2-4a72-4501-bdfc-dcf4808060f5-kube-api-access-8zgzl\") pod \"nova-api-7ac3-account-create-update-mp544\" (UID: \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\") " pod="openstack/nova-api-7ac3-account-create-update-mp544" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.433831 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f60875-ac12-428f-9992-29473a3dfb62-operator-scripts\") pod \"nova-cell1-db-create-hg5sm\" (UID: \"99f60875-ac12-428f-9992-29473a3dfb62\") " pod="openstack/nova-cell1-db-create-hg5sm" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.434035 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d187d4b2-4a72-4501-bdfc-dcf4808060f5-operator-scripts\") pod \"nova-api-7ac3-account-create-update-mp544\" (UID: \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\") " pod="openstack/nova-api-7ac3-account-create-update-mp544" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.483712 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-d224-account-create-update-8fhpr"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.484847 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d224-account-create-update-8fhpr" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.486626 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.492111 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d224-account-create-update-8fhpr"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.511821 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-rjhbt" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.526047 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-tj9c4" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.535290 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d187d4b2-4a72-4501-bdfc-dcf4808060f5-operator-scripts\") pod \"nova-api-7ac3-account-create-update-mp544\" (UID: \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\") " pod="openstack/nova-api-7ac3-account-create-update-mp544" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.535429 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzz9j\" (UniqueName: \"kubernetes.io/projected/99f60875-ac12-428f-9992-29473a3dfb62-kube-api-access-tzz9j\") pod \"nova-cell1-db-create-hg5sm\" (UID: \"99f60875-ac12-428f-9992-29473a3dfb62\") " pod="openstack/nova-cell1-db-create-hg5sm" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.535466 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zgzl\" (UniqueName: \"kubernetes.io/projected/d187d4b2-4a72-4501-bdfc-dcf4808060f5-kube-api-access-8zgzl\") pod \"nova-api-7ac3-account-create-update-mp544\" (UID: \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\") " pod="openstack/nova-api-7ac3-account-create-update-mp544" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.535517 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrhkf\" (UniqueName: \"kubernetes.io/projected/519311f0-3163-4215-a7b8-cf7302c9e8f8-kube-api-access-lrhkf\") pod \"nova-cell0-d224-account-create-update-8fhpr\" (UID: \"519311f0-3163-4215-a7b8-cf7302c9e8f8\") " pod="openstack/nova-cell0-d224-account-create-update-8fhpr" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.535539 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519311f0-3163-4215-a7b8-cf7302c9e8f8-operator-scripts\") pod \"nova-cell0-d224-account-create-update-8fhpr\" (UID: \"519311f0-3163-4215-a7b8-cf7302c9e8f8\") " pod="openstack/nova-cell0-d224-account-create-update-8fhpr" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.535573 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f60875-ac12-428f-9992-29473a3dfb62-operator-scripts\") pod \"nova-cell1-db-create-hg5sm\" (UID: \"99f60875-ac12-428f-9992-29473a3dfb62\") " pod="openstack/nova-cell1-db-create-hg5sm" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.536548 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d187d4b2-4a72-4501-bdfc-dcf4808060f5-operator-scripts\") pod \"nova-api-7ac3-account-create-update-mp544\" (UID: \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\") " pod="openstack/nova-api-7ac3-account-create-update-mp544" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.536915 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f60875-ac12-428f-9992-29473a3dfb62-operator-scripts\") pod \"nova-cell1-db-create-hg5sm\" (UID: \"99f60875-ac12-428f-9992-29473a3dfb62\") " pod="openstack/nova-cell1-db-create-hg5sm" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.553780 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzz9j\" 
(UniqueName: \"kubernetes.io/projected/99f60875-ac12-428f-9992-29473a3dfb62-kube-api-access-tzz9j\") pod \"nova-cell1-db-create-hg5sm\" (UID: \"99f60875-ac12-428f-9992-29473a3dfb62\") " pod="openstack/nova-cell1-db-create-hg5sm" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.555172 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zgzl\" (UniqueName: \"kubernetes.io/projected/d187d4b2-4a72-4501-bdfc-dcf4808060f5-kube-api-access-8zgzl\") pod \"nova-api-7ac3-account-create-update-mp544\" (UID: \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\") " pod="openstack/nova-api-7ac3-account-create-update-mp544" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.594443 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-7ac3-account-create-update-mp544" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.639982 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrhkf\" (UniqueName: \"kubernetes.io/projected/519311f0-3163-4215-a7b8-cf7302c9e8f8-kube-api-access-lrhkf\") pod \"nova-cell0-d224-account-create-update-8fhpr\" (UID: \"519311f0-3163-4215-a7b8-cf7302c9e8f8\") " pod="openstack/nova-cell0-d224-account-create-update-8fhpr" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.640020 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519311f0-3163-4215-a7b8-cf7302c9e8f8-operator-scripts\") pod \"nova-cell0-d224-account-create-update-8fhpr\" (UID: \"519311f0-3163-4215-a7b8-cf7302c9e8f8\") " pod="openstack/nova-cell0-d224-account-create-update-8fhpr" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.641546 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519311f0-3163-4215-a7b8-cf7302c9e8f8-operator-scripts\") pod \"nova-cell0-d224-account-create-update-8fhpr\" (UID: \"519311f0-3163-4215-a7b8-cf7302c9e8f8\") " pod="openstack/nova-cell0-d224-account-create-update-8fhpr" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.665349 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.665982 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrhkf\" (UniqueName: \"kubernetes.io/projected/519311f0-3163-4215-a7b8-cf7302c9e8f8-kube-api-access-lrhkf\") pod \"nova-cell0-d224-account-create-update-8fhpr\" (UID: \"519311f0-3163-4215-a7b8-cf7302c9e8f8\") " pod="openstack/nova-cell0-d224-account-create-update-8fhpr" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.681159 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-0b21-account-create-update-8zzvd"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.682214 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.684305 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.701409 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-0b21-account-create-update-8zzvd"] Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.711976 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-hg5sm" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.741639 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-488zp\" (UniqueName: \"kubernetes.io/projected/a19b09de-90b9-4718-8f0b-016f84266f36-kube-api-access-488zp\") pod \"nova-cell1-0b21-account-create-update-8zzvd\" (UID: \"a19b09de-90b9-4718-8f0b-016f84266f36\") " pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.741959 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19b09de-90b9-4718-8f0b-016f84266f36-operator-scripts\") pod \"nova-cell1-0b21-account-create-update-8zzvd\" (UID: \"a19b09de-90b9-4718-8f0b-016f84266f36\") " pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.807820 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d224-account-create-update-8fhpr" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.846178 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19b09de-90b9-4718-8f0b-016f84266f36-operator-scripts\") pod \"nova-cell1-0b21-account-create-update-8zzvd\" (UID: \"a19b09de-90b9-4718-8f0b-016f84266f36\") " pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.846252 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-488zp\" (UniqueName: \"kubernetes.io/projected/a19b09de-90b9-4718-8f0b-016f84266f36-kube-api-access-488zp\") pod \"nova-cell1-0b21-account-create-update-8zzvd\" (UID: \"a19b09de-90b9-4718-8f0b-016f84266f36\") " pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.847140 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19b09de-90b9-4718-8f0b-016f84266f36-operator-scripts\") pod \"nova-cell1-0b21-account-create-update-8zzvd\" (UID: \"a19b09de-90b9-4718-8f0b-016f84266f36\") " pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" Dec 05 05:45:24 crc kubenswrapper[4652]: I1205 05:45:24.866124 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-488zp\" (UniqueName: \"kubernetes.io/projected/a19b09de-90b9-4718-8f0b-016f84266f36-kube-api-access-488zp\") pod \"nova-cell1-0b21-account-create-update-8zzvd\" (UID: \"a19b09de-90b9-4718-8f0b-016f84266f36\") " pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.008768 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-rjhbt"] Dec 05 05:45:25 crc kubenswrapper[4652]: W1205 05:45:25.045583 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7620a38c_f020_46b9_b5b7_34e4d3a0f96b.slice/crio-099e7d286e06e975aa7e04d994382927c6029ff7a184d1c5cab74b8447362729 WatchSource:0}: Error finding container 099e7d286e06e975aa7e04d994382927c6029ff7a184d1c5cab74b8447362729: Status 404 returned error can't find the container with id 
099e7d286e06e975aa7e04d994382927c6029ff7a184d1c5cab74b8447362729
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.050883 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0b21-account-create-update-8zzvd"
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.057186 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-tj9c4"]
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.185588 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-7ac3-account-create-update-mp544"]
Dec 05 05:45:25 crc kubenswrapper[4652]: W1205 05:45:25.192974 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99f60875_ac12_428f_9992_29473a3dfb62.slice/crio-a461bd9756fea324a3aefe01bc8c9ef164180f99d774bf18dd87c2b84c3c650a WatchSource:0}: Error finding container a461bd9756fea324a3aefe01bc8c9ef164180f99d774bf18dd87c2b84c3c650a: Status 404 returned error can't find the container with id a461bd9756fea324a3aefe01bc8c9ef164180f99d774bf18dd87c2b84c3c650a
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.197317 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-hg5sm"]
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.354080 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d224-account-create-update-8fhpr"]
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.479665 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-0b21-account-create-update-8zzvd"]
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.493288 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerStarted","Data":"422e021ff3124358e7a568e0fb33a941656a158cfb2007cc1da631ef79333426"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.496252 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d224-account-create-update-8fhpr" event={"ID":"519311f0-3163-4215-a7b8-cf7302c9e8f8","Type":"ContainerStarted","Data":"023f2663410f258974c28e1bac547c69856628bda315fa6228f66af8148bc256"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.498346 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hg5sm" event={"ID":"99f60875-ac12-428f-9992-29473a3dfb62","Type":"ContainerStarted","Data":"5970cba1ccb5e1474abcdb6fe4ea8b97113ff5d8cd118999bc307f0761a8d80f"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.498405 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hg5sm" event={"ID":"99f60875-ac12-428f-9992-29473a3dfb62","Type":"ContainerStarted","Data":"a461bd9756fea324a3aefe01bc8c9ef164180f99d774bf18dd87c2b84c3c650a"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.501800 4652 generic.go:334] "Generic (PLEG): container finished" podID="7620a38c-f020-46b9-b5b7-34e4d3a0f96b" containerID="50512b1fb91f5a6515acdf4ab562dc184bf24b33aa90ab89026328fe0db1b95f" exitCode=0
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.501860 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tj9c4" event={"ID":"7620a38c-f020-46b9-b5b7-34e4d3a0f96b","Type":"ContainerDied","Data":"50512b1fb91f5a6515acdf4ab562dc184bf24b33aa90ab89026328fe0db1b95f"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.501875 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tj9c4" event={"ID":"7620a38c-f020-46b9-b5b7-34e4d3a0f96b","Type":"ContainerStarted","Data":"099e7d286e06e975aa7e04d994382927c6029ff7a184d1c5cab74b8447362729"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.504458 4652 generic.go:334] "Generic (PLEG): container finished" podID="6eb04f8f-6137-4976-b83c-61694b5e34bb" containerID="18e73cca34687422e4ace6d76a079a2aa54f555c15fb566b0a6ebc0c74f5bb24" exitCode=0
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.504537 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-rjhbt" event={"ID":"6eb04f8f-6137-4976-b83c-61694b5e34bb","Type":"ContainerDied","Data":"18e73cca34687422e4ace6d76a079a2aa54f555c15fb566b0a6ebc0c74f5bb24"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.504579 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-rjhbt" event={"ID":"6eb04f8f-6137-4976-b83c-61694b5e34bb","Type":"ContainerStarted","Data":"6e444bac37b3b7d148dea00705a1927fd77894a6f6170a7784d4a10e061041b7"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.510917 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-hg5sm" podStartSLOduration=1.5109066420000001 podStartE2EDuration="1.510906642s" podCreationTimestamp="2025-12-05 05:45:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:25.50981957 +0000 UTC m=+1127.746549836" watchObservedRunningTime="2025-12-05 05:45:25.510906642 +0000 UTC m=+1127.747636909"
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.512435 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-7ac3-account-create-update-mp544" event={"ID":"d187d4b2-4a72-4501-bdfc-dcf4808060f5","Type":"ContainerStarted","Data":"d678705d76ec3eb0fc863b1d1c2025e9489f0b7ed99b478050eab495e14781a8"}
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.512476 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-7ac3-account-create-update-mp544" event={"ID":"d187d4b2-4a72-4501-bdfc-dcf4808060f5","Type":"ContainerStarted","Data":"fa2a4c4562371d56b707c39ee3ab5069003a39be12d2ac24bdc05deeacfc45dd"}
Dec 05 05:45:25 crc kubenswrapper[4652]: W1205 05:45:25.542872 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda19b09de_90b9_4718_8f0b_016f84266f36.slice/crio-9a5db81d66eed29a2be0e4d5752a32512e30802a0266ee2a3b4f0d6e7e5a2b3b WatchSource:0}: Error finding container 9a5db81d66eed29a2be0e4d5752a32512e30802a0266ee2a3b4f0d6e7e5a2b3b: Status 404 returned error can't find the container with id 9a5db81d66eed29a2be0e4d5752a32512e30802a0266ee2a3b4f0d6e7e5a2b3b
Dec 05 05:45:25 crc kubenswrapper[4652]: I1205 05:45:25.558901 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-7ac3-account-create-update-mp544" podStartSLOduration=1.558884497 podStartE2EDuration="1.558884497s" podCreationTimestamp="2025-12-05 05:45:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:25.541155029 +0000 UTC m=+1127.777885296" watchObservedRunningTime="2025-12-05 05:45:25.558884497 +0000 UTC m=+1127.795614764"
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.523101 4652 generic.go:334] "Generic (PLEG): container finished" podID="a19b09de-90b9-4718-8f0b-016f84266f36" containerID="8006bf94bf3ddafe44bd37a89de67ebb272ba4214a0482617f43e4b803ea15a2" exitCode=0
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.523179 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" event={"ID":"a19b09de-90b9-4718-8f0b-016f84266f36","Type":"ContainerDied","Data":"8006bf94bf3ddafe44bd37a89de67ebb272ba4214a0482617f43e4b803ea15a2"}
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.523396 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" event={"ID":"a19b09de-90b9-4718-8f0b-016f84266f36","Type":"ContainerStarted","Data":"9a5db81d66eed29a2be0e4d5752a32512e30802a0266ee2a3b4f0d6e7e5a2b3b"}
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.524649 4652 generic.go:334] "Generic (PLEG): container finished" podID="d187d4b2-4a72-4501-bdfc-dcf4808060f5" containerID="d678705d76ec3eb0fc863b1d1c2025e9489f0b7ed99b478050eab495e14781a8" exitCode=0
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.524695 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-7ac3-account-create-update-mp544" event={"ID":"d187d4b2-4a72-4501-bdfc-dcf4808060f5","Type":"ContainerDied","Data":"d678705d76ec3eb0fc863b1d1c2025e9489f0b7ed99b478050eab495e14781a8"}
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.526695 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerStarted","Data":"d0032b87c6047940ed678dba19916daf8c432d2d1847756f11978349177d599f"}
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.526721 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerStarted","Data":"b8643dfc6fc892f522bf658687a20f9955c8af627137253df9d385b0fb51e7f9"}
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.527886 4652 generic.go:334] "Generic (PLEG): container finished" podID="519311f0-3163-4215-a7b8-cf7302c9e8f8" containerID="dee17f54db89f2555885f37c6af39842f0a2d842c75518c039d709c20bd3b5a9" exitCode=0
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.527925 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d224-account-create-update-8fhpr" event={"ID":"519311f0-3163-4215-a7b8-cf7302c9e8f8","Type":"ContainerDied","Data":"dee17f54db89f2555885f37c6af39842f0a2d842c75518c039d709c20bd3b5a9"}
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.530322 4652 generic.go:334] "Generic (PLEG): container finished" podID="99f60875-ac12-428f-9992-29473a3dfb62" containerID="5970cba1ccb5e1474abcdb6fe4ea8b97113ff5d8cd118999bc307f0761a8d80f" exitCode=0
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.530536 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hg5sm" event={"ID":"99f60875-ac12-428f-9992-29473a3dfb62","Type":"ContainerDied","Data":"5970cba1ccb5e1474abcdb6fe4ea8b97113ff5d8cd118999bc307f0761a8d80f"}
Dec 05 05:45:26 crc kubenswrapper[4652]: I1205 05:45:26.968026 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.017041 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-rjhbt"
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.071941 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tj9c4"
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.086625 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb04f8f-6137-4976-b83c-61694b5e34bb-operator-scripts\") pod \"6eb04f8f-6137-4976-b83c-61694b5e34bb\" (UID: \"6eb04f8f-6137-4976-b83c-61694b5e34bb\") "
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.086671 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ngxl\" (UniqueName: \"kubernetes.io/projected/6eb04f8f-6137-4976-b83c-61694b5e34bb-kube-api-access-7ngxl\") pod \"6eb04f8f-6137-4976-b83c-61694b5e34bb\" (UID: \"6eb04f8f-6137-4976-b83c-61694b5e34bb\") "
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.087055 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eb04f8f-6137-4976-b83c-61694b5e34bb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6eb04f8f-6137-4976-b83c-61694b5e34bb" (UID: "6eb04f8f-6137-4976-b83c-61694b5e34bb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.087474 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb04f8f-6137-4976-b83c-61694b5e34bb-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.091134 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eb04f8f-6137-4976-b83c-61694b5e34bb-kube-api-access-7ngxl" (OuterVolumeSpecName: "kube-api-access-7ngxl") pod "6eb04f8f-6137-4976-b83c-61694b5e34bb" (UID: "6eb04f8f-6137-4976-b83c-61694b5e34bb"). InnerVolumeSpecName "kube-api-access-7ngxl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.188608 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-operator-scripts\") pod \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\" (UID: \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\") "
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.188766 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sgtz\" (UniqueName: \"kubernetes.io/projected/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-kube-api-access-4sgtz\") pod \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\" (UID: \"7620a38c-f020-46b9-b5b7-34e4d3a0f96b\") "
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.189290 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ngxl\" (UniqueName: \"kubernetes.io/projected/6eb04f8f-6137-4976-b83c-61694b5e34bb-kube-api-access-7ngxl\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.190092 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7620a38c-f020-46b9-b5b7-34e4d3a0f96b" (UID: "7620a38c-f020-46b9-b5b7-34e4d3a0f96b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.192738 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-kube-api-access-4sgtz" (OuterVolumeSpecName: "kube-api-access-4sgtz") pod "7620a38c-f020-46b9-b5b7-34e4d3a0f96b" (UID: "7620a38c-f020-46b9-b5b7-34e4d3a0f96b"). InnerVolumeSpecName "kube-api-access-4sgtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.290897 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.290928 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sgtz\" (UniqueName: \"kubernetes.io/projected/7620a38c-f020-46b9-b5b7-34e4d3a0f96b-kube-api-access-4sgtz\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.538839 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerStarted","Data":"54680cdff8ca78812d3cf4eb177d8f01974aaf80fbe6e8217a3959f8479aa127"}
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.540645 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tj9c4" event={"ID":"7620a38c-f020-46b9-b5b7-34e4d3a0f96b","Type":"ContainerDied","Data":"099e7d286e06e975aa7e04d994382927c6029ff7a184d1c5cab74b8447362729"}
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.540875 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tj9c4"
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.540892 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="099e7d286e06e975aa7e04d994382927c6029ff7a184d1c5cab74b8447362729"
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.542239 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-rjhbt"
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.544685 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-rjhbt" event={"ID":"6eb04f8f-6137-4976-b83c-61694b5e34bb","Type":"ContainerDied","Data":"6e444bac37b3b7d148dea00705a1927fd77894a6f6170a7784d4a10e061041b7"}
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.544730 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e444bac37b3b7d148dea00705a1927fd77894a6f6170a7784d4a10e061041b7"
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.826901 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-7ac3-account-create-update-mp544"
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.903453 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zgzl\" (UniqueName: \"kubernetes.io/projected/d187d4b2-4a72-4501-bdfc-dcf4808060f5-kube-api-access-8zgzl\") pod \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\" (UID: \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\") "
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.903719 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d187d4b2-4a72-4501-bdfc-dcf4808060f5-operator-scripts\") pod \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\" (UID: \"d187d4b2-4a72-4501-bdfc-dcf4808060f5\") "
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.904530 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d187d4b2-4a72-4501-bdfc-dcf4808060f5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d187d4b2-4a72-4501-bdfc-dcf4808060f5" (UID: "d187d4b2-4a72-4501-bdfc-dcf4808060f5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:27 crc kubenswrapper[4652]: I1205 05:45:27.909750 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d187d4b2-4a72-4501-bdfc-dcf4808060f5-kube-api-access-8zgzl" (OuterVolumeSpecName: "kube-api-access-8zgzl") pod "d187d4b2-4a72-4501-bdfc-dcf4808060f5" (UID: "d187d4b2-4a72-4501-bdfc-dcf4808060f5"). InnerVolumeSpecName "kube-api-access-8zgzl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.005990 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d187d4b2-4a72-4501-bdfc-dcf4808060f5-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.006021 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zgzl\" (UniqueName: \"kubernetes.io/projected/d187d4b2-4a72-4501-bdfc-dcf4808060f5-kube-api-access-8zgzl\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.100457 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-hg5sm"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.105418 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d224-account-create-update-8fhpr"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.117398 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0b21-account-create-update-8zzvd"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.208401 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519311f0-3163-4215-a7b8-cf7302c9e8f8-operator-scripts\") pod \"519311f0-3163-4215-a7b8-cf7302c9e8f8\" (UID: \"519311f0-3163-4215-a7b8-cf7302c9e8f8\") "
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.208468 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzz9j\" (UniqueName: \"kubernetes.io/projected/99f60875-ac12-428f-9992-29473a3dfb62-kube-api-access-tzz9j\") pod \"99f60875-ac12-428f-9992-29473a3dfb62\" (UID: \"99f60875-ac12-428f-9992-29473a3dfb62\") "
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.208574 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19b09de-90b9-4718-8f0b-016f84266f36-operator-scripts\") pod \"a19b09de-90b9-4718-8f0b-016f84266f36\" (UID: \"a19b09de-90b9-4718-8f0b-016f84266f36\") "
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.208596 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-488zp\" (UniqueName: \"kubernetes.io/projected/a19b09de-90b9-4718-8f0b-016f84266f36-kube-api-access-488zp\") pod \"a19b09de-90b9-4718-8f0b-016f84266f36\" (UID: \"a19b09de-90b9-4718-8f0b-016f84266f36\") "
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.209014 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f60875-ac12-428f-9992-29473a3dfb62-operator-scripts\") pod \"99f60875-ac12-428f-9992-29473a3dfb62\" (UID: \"99f60875-ac12-428f-9992-29473a3dfb62\") "
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.209041 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrhkf\" (UniqueName: \"kubernetes.io/projected/519311f0-3163-4215-a7b8-cf7302c9e8f8-kube-api-access-lrhkf\") pod \"519311f0-3163-4215-a7b8-cf7302c9e8f8\" (UID: \"519311f0-3163-4215-a7b8-cf7302c9e8f8\") "
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.210112 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99f60875-ac12-428f-9992-29473a3dfb62-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "99f60875-ac12-428f-9992-29473a3dfb62" (UID: "99f60875-ac12-428f-9992-29473a3dfb62"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.210131 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/519311f0-3163-4215-a7b8-cf7302c9e8f8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "519311f0-3163-4215-a7b8-cf7302c9e8f8" (UID: "519311f0-3163-4215-a7b8-cf7302c9e8f8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.210165 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a19b09de-90b9-4718-8f0b-016f84266f36-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a19b09de-90b9-4718-8f0b-016f84266f36" (UID: "a19b09de-90b9-4718-8f0b-016f84266f36"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.211101 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99f60875-ac12-428f-9992-29473a3dfb62-kube-api-access-tzz9j" (OuterVolumeSpecName: "kube-api-access-tzz9j") pod "99f60875-ac12-428f-9992-29473a3dfb62" (UID: "99f60875-ac12-428f-9992-29473a3dfb62"). InnerVolumeSpecName "kube-api-access-tzz9j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.212762 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a19b09de-90b9-4718-8f0b-016f84266f36-kube-api-access-488zp" (OuterVolumeSpecName: "kube-api-access-488zp") pod "a19b09de-90b9-4718-8f0b-016f84266f36" (UID: "a19b09de-90b9-4718-8f0b-016f84266f36"). InnerVolumeSpecName "kube-api-access-488zp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.215772 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/519311f0-3163-4215-a7b8-cf7302c9e8f8-kube-api-access-lrhkf" (OuterVolumeSpecName: "kube-api-access-lrhkf") pod "519311f0-3163-4215-a7b8-cf7302c9e8f8" (UID: "519311f0-3163-4215-a7b8-cf7302c9e8f8"). InnerVolumeSpecName "kube-api-access-lrhkf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.313399 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19b09de-90b9-4718-8f0b-016f84266f36-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.313429 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-488zp\" (UniqueName: \"kubernetes.io/projected/a19b09de-90b9-4718-8f0b-016f84266f36-kube-api-access-488zp\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.313441 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99f60875-ac12-428f-9992-29473a3dfb62-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.313451 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrhkf\" (UniqueName: \"kubernetes.io/projected/519311f0-3163-4215-a7b8-cf7302c9e8f8-kube-api-access-lrhkf\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.313464 4652 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519311f0-3163-4215-a7b8-cf7302c9e8f8-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.313475 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzz9j\" (UniqueName: \"kubernetes.io/projected/99f60875-ac12-428f-9992-29473a3dfb62-kube-api-access-tzz9j\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.554736 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerStarted","Data":"86e9bba08c721073d2815381c8bd95aaee03903d813502660b41c0c11281c16a"}
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.554937 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.558025 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d224-account-create-update-8fhpr" event={"ID":"519311f0-3163-4215-a7b8-cf7302c9e8f8","Type":"ContainerDied","Data":"023f2663410f258974c28e1bac547c69856628bda315fa6228f66af8148bc256"}
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.558064 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="023f2663410f258974c28e1bac547c69856628bda315fa6228f66af8148bc256"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.558117 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d224-account-create-update-8fhpr"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.559343 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hg5sm" event={"ID":"99f60875-ac12-428f-9992-29473a3dfb62","Type":"ContainerDied","Data":"a461bd9756fea324a3aefe01bc8c9ef164180f99d774bf18dd87c2b84c3c650a"}
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.559377 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a461bd9756fea324a3aefe01bc8c9ef164180f99d774bf18dd87c2b84c3c650a"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.559429 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-hg5sm"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.581353 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-0b21-account-create-update-8zzvd" event={"ID":"a19b09de-90b9-4718-8f0b-016f84266f36","Type":"ContainerDied","Data":"9a5db81d66eed29a2be0e4d5752a32512e30802a0266ee2a3b4f0d6e7e5a2b3b"}
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.581423 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a5db81d66eed29a2be0e4d5752a32512e30802a0266ee2a3b4f0d6e7e5a2b3b"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.581479 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-0b21-account-create-update-8zzvd"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.583711 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-7ac3-account-create-update-mp544" event={"ID":"d187d4b2-4a72-4501-bdfc-dcf4808060f5","Type":"ContainerDied","Data":"fa2a4c4562371d56b707c39ee3ab5069003a39be12d2ac24bdc05deeacfc45dd"}
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.583747 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa2a4c4562371d56b707c39ee3ab5069003a39be12d2ac24bdc05deeacfc45dd"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.583791 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-7ac3-account-create-update-mp544"
Dec 05 05:45:28 crc kubenswrapper[4652]: I1205 05:45:28.588767 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.087602579 podStartE2EDuration="5.588755838s" podCreationTimestamp="2025-12-05 05:45:23 +0000 UTC" firstStartedPulling="2025-12-05 05:45:24.700673526 +0000 UTC m=+1126.937403793" lastFinishedPulling="2025-12-05 05:45:28.201826784 +0000 UTC m=+1130.438557052" observedRunningTime="2025-12-05 05:45:28.573875644 +0000 UTC m=+1130.810605912" watchObservedRunningTime="2025-12-05 05:45:28.588755838 +0000 UTC m=+1130.825486105"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.126132 4652 scope.go:117] "RemoveContainer" containerID="7d19c23409283640cb1bd3a0d34c8d831faf4762b4108458c1b8b95e16face3c"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.603096 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerStarted","Data":"b83ad4af98342e82451a246dbb5c9c80a4ca4ce922da71c27da3b2c387289f0d"}
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.884790 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cmts"]
Dec 05 05:45:29 crc kubenswrapper[4652]: E1205 05:45:29.885583 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eb04f8f-6137-4976-b83c-61694b5e34bb" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.885604 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eb04f8f-6137-4976-b83c-61694b5e34bb" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: E1205 05:45:29.885624 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519311f0-3163-4215-a7b8-cf7302c9e8f8" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.885630 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="519311f0-3163-4215-a7b8-cf7302c9e8f8" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: E1205 05:45:29.885645 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d187d4b2-4a72-4501-bdfc-dcf4808060f5" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.885651 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d187d4b2-4a72-4501-bdfc-dcf4808060f5" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: E1205 05:45:29.885680 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f60875-ac12-428f-9992-29473a3dfb62" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.885685 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f60875-ac12-428f-9992-29473a3dfb62" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: E1205 05:45:29.885704 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7620a38c-f020-46b9-b5b7-34e4d3a0f96b" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.885710 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="7620a38c-f020-46b9-b5b7-34e4d3a0f96b" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: E1205 05:45:29.885726 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a19b09de-90b9-4718-8f0b-016f84266f36" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.885732 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a19b09de-90b9-4718-8f0b-016f84266f36" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.886245 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="519311f0-3163-4215-a7b8-cf7302c9e8f8" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.886276 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d187d4b2-4a72-4501-bdfc-dcf4808060f5" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.886338 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="a19b09de-90b9-4718-8f0b-016f84266f36" containerName="mariadb-account-create-update"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.886357 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eb04f8f-6137-4976-b83c-61694b5e34bb" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.886371 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="99f60875-ac12-428f-9992-29473a3dfb62" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.886399 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="7620a38c-f020-46b9-b5b7-34e4d3a0f96b" containerName="mariadb-database-create"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.888783 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.896284 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.896488 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5wsr5"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.896735 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.926461 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cmts"]
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.944233 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.944286 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-config-data\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.944316 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-scripts\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:29 crc kubenswrapper[4652]: I1205 05:45:29.944424 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27wcp\" (UniqueName: \"kubernetes.io/projected/3516f65d-a205-4768-b345-cee580a16b09-kube-api-access-27wcp\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.020191 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7744f5c9d6-t75mf" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.159:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.159:8443: connect: connection refused"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.020736 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7744f5c9d6-t75mf"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.046530 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27wcp\" (UniqueName: \"kubernetes.io/projected/3516f65d-a205-4768-b345-cee580a16b09-kube-api-access-27wcp\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.046735 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.046778 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-config-data\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.046819 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-scripts\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.053104 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.055388 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-config-data\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.055817 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-scripts\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.064769 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27wcp\" (UniqueName: \"kubernetes.io/projected/3516f65d-a205-4768-b345-cee580a16b09-kube-api-access-27wcp\") pod \"nova-cell0-conductor-db-sync-6cmts\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.225312 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6cmts"
Dec 05 05:45:30 crc kubenswrapper[4652]: I1205 05:45:30.753378 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cmts"]
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.386213 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.386439 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="ceilometer-central-agent" containerID="cri-o://b8643dfc6fc892f522bf658687a20f9955c8af627137253df9d385b0fb51e7f9" gracePeriod=30
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.386570 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="proxy-httpd" containerID="cri-o://86e9bba08c721073d2815381c8bd95aaee03903d813502660b41c0c11281c16a" gracePeriod=30
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.386616 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="sg-core" containerID="cri-o://54680cdff8ca78812d3cf4eb177d8f01974aaf80fbe6e8217a3959f8479aa127" gracePeriod=30
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.386650 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="ceilometer-notification-agent" containerID="cri-o://d0032b87c6047940ed678dba19916daf8c432d2d1847756f11978349177d599f" gracePeriod=30
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.642310 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6cmts" event={"ID":"3516f65d-a205-4768-b345-cee580a16b09","Type":"ContainerStarted","Data":"1a133634d1ada4590cbb18d31b99abae52e4b9761d617bade728862abfef775b"}
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.647522 4652 generic.go:334] "Generic (PLEG): container finished" podID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerID="86e9bba08c721073d2815381c8bd95aaee03903d813502660b41c0c11281c16a" exitCode=0
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.647593 4652 generic.go:334] "Generic (PLEG): container finished" podID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerID="54680cdff8ca78812d3cf4eb177d8f01974aaf80fbe6e8217a3959f8479aa127" exitCode=2
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.647624 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerDied","Data":"86e9bba08c721073d2815381c8bd95aaee03903d813502660b41c0c11281c16a"}
Dec 05 05:45:31 crc kubenswrapper[4652]: I1205 05:45:31.647653 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerDied","Data":"54680cdff8ca78812d3cf4eb177d8f01974aaf80fbe6e8217a3959f8479aa127"}
Dec 05 05:45:31 crc kubenswrapper[4652]: E1205 05:45:31.755211 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2116de3c_a80b_442e_8de5_a4a8381e345e.slice/crio-b8643dfc6fc892f522bf658687a20f9955c8af627137253df9d385b0fb51e7f9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2116de3c_a80b_442e_8de5_a4a8381e345e.slice/crio-conmon-d0032b87c6047940ed678dba19916daf8c432d2d1847756f11978349177d599f.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 05:45:32 crc kubenswrapper[4652]: I1205 05:45:32.333207 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-65dffd4ccf-cqtxw"
Dec 05 05:45:32 crc kubenswrapper[4652]: I1205 05:45:32.335448 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-65dffd4ccf-cqtxw"
Dec 05 05:45:32 crc kubenswrapper[4652]: I1205 05:45:32.680345 4652 generic.go:334] "Generic (PLEG): container finished" podID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerID="d0032b87c6047940ed678dba19916daf8c432d2d1847756f11978349177d599f" exitCode=0
Dec 05 05:45:32 crc kubenswrapper[4652]: I1205 05:45:32.680376 4652 generic.go:334] "Generic (PLEG): container finished" podID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerID="b8643dfc6fc892f522bf658687a20f9955c8af627137253df9d385b0fb51e7f9" exitCode=0
Dec 05 05:45:32 crc kubenswrapper[4652]: I1205 05:45:32.680411 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerDied","Data":"d0032b87c6047940ed678dba19916daf8c432d2d1847756f11978349177d599f"}
Dec 05 05:45:32 crc kubenswrapper[4652]: I1205 05:45:32.680460 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerDied","Data":"b8643dfc6fc892f522bf658687a20f9955c8af627137253df9d385b0fb51e7f9"}
Dec 05 05:45:33 crc kubenswrapper[4652]: I1205 05:45:33.707029 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 05:45:33 crc kubenswrapper[4652]: I1205 05:45:33.709038 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-httpd" containerID="cri-o://67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee" gracePeriod=30
Dec 05 05:45:33 crc kubenswrapper[4652]: I1205 05:45:33.709404 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-log" containerID="cri-o://a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33" gracePeriod=30
Dec 05 05:45:33 crc kubenswrapper[4652]: I1205 05:45:33.716752 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.180:9292/healthcheck\": EOF"
Dec 05 05:45:33 crc kubenswrapper[4652]: I1205 05:45:33.721935 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/glance-default-external-api-0" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.180:9292/healthcheck\": EOF"
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.534205 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.534453 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-httpd" containerID="cri-o://86e31d072f4e3486b357fe0bf2c9aadcf2e4861cdcc0f2e2ec938a7319aca2df" gracePeriod=30
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.534614 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-log" containerID="cri-o://8792eb6cec75227540052c82010fe2cb97fbbdd431c72ef97e27a65613474b8b" gracePeriod=30
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.553434 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.181:9292/healthcheck\": EOF"
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.554150 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/glance-default-internal-api-0" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.181:9292/healthcheck\": EOF"
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.554358 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/glance-default-internal-api-0" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.181:9292/healthcheck\": EOF"
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.710041 4652 generic.go:334] "Generic (PLEG): container finished" podID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerID="a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33" exitCode=143
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.710161 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f42d1f3-d588-4065-8c30-41c5309f45cb","Type":"ContainerDied","Data":"a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33"}
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.714183 4652 generic.go:334] "Generic (PLEG): container finished" podID="4dccdfc3-432a-459c-a940-1d167d41405b" containerID="8792eb6cec75227540052c82010fe2cb97fbbdd431c72ef97e27a65613474b8b" exitCode=143
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.714245 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4dccdfc3-432a-459c-a940-1d167d41405b","Type":"ContainerDied","Data":"8792eb6cec75227540052c82010fe2cb97fbbdd431c72ef97e27a65613474b8b"}
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.714280 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0"
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.714320 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0"
Dec 05 05:45:34 crc kubenswrapper[4652]: I1205 05:45:34.754695 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0"
Dec 05 05:45:35 crc kubenswrapper[4652]: I1205 05:45:35.733895 4652 generic.go:334] "Generic (PLEG): container finished" podID="4e04a533-fea2-4fde-a50b-5852129fa912" containerID="1a97cb99478e8e1f70f698c3e507c2e8e7e789636755f4bc50bd106c55435730" exitCode=137
Dec 05 05:45:35 crc kubenswrapper[4652]: I1205 05:45:35.735077 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7744f5c9d6-t75mf" event={"ID":"4e04a533-fea2-4fde-a50b-5852129fa912","Type":"ContainerDied","Data":"1a97cb99478e8e1f70f698c3e507c2e8e7e789636755f4bc50bd106c55435730"}
Dec 05 05:45:35 crc kubenswrapper[4652]: I1205 05:45:35.763139 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0"
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.264793 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7744f5c9d6-t75mf"
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.312093 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e04a533-fea2-4fde-a50b-5852129fa912-logs\") pod \"4e04a533-fea2-4fde-a50b-5852129fa912\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.312194 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-scripts\") pod \"4e04a533-fea2-4fde-a50b-5852129fa912\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.312409 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-config-data\") pod \"4e04a533-fea2-4fde-a50b-5852129fa912\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.312531 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5l58\" (UniqueName: \"kubernetes.io/projected/4e04a533-fea2-4fde-a50b-5852129fa912-kube-api-access-s5l58\") pod \"4e04a533-fea2-4fde-a50b-5852129fa912\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.312639 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-tls-certs\") pod \"4e04a533-fea2-4fde-a50b-5852129fa912\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.312682 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-secret-key\") pod \"4e04a533-fea2-4fde-a50b-5852129fa912\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.312701 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-combined-ca-bundle\") pod \"4e04a533-fea2-4fde-a50b-5852129fa912\" (UID: \"4e04a533-fea2-4fde-a50b-5852129fa912\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.314646 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e04a533-fea2-4fde-a50b-5852129fa912-logs" (OuterVolumeSpecName: "logs") pod "4e04a533-fea2-4fde-a50b-5852129fa912" (UID: "4e04a533-fea2-4fde-a50b-5852129fa912"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.318026 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e04a533-fea2-4fde-a50b-5852129fa912-kube-api-access-s5l58" (OuterVolumeSpecName: "kube-api-access-s5l58") pod "4e04a533-fea2-4fde-a50b-5852129fa912" (UID: "4e04a533-fea2-4fde-a50b-5852129fa912"). InnerVolumeSpecName "kube-api-access-s5l58". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.320255 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4e04a533-fea2-4fde-a50b-5852129fa912" (UID: "4e04a533-fea2-4fde-a50b-5852129fa912"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.339067 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-scripts" (OuterVolumeSpecName: "scripts") pod "4e04a533-fea2-4fde-a50b-5852129fa912" (UID: "4e04a533-fea2-4fde-a50b-5852129fa912"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.343448 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-config-data" (OuterVolumeSpecName: "config-data") pod "4e04a533-fea2-4fde-a50b-5852129fa912" (UID: "4e04a533-fea2-4fde-a50b-5852129fa912"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.371435 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e04a533-fea2-4fde-a50b-5852129fa912" (UID: "4e04a533-fea2-4fde-a50b-5852129fa912"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.374478 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "4e04a533-fea2-4fde-a50b-5852129fa912" (UID: "4e04a533-fea2-4fde-a50b-5852129fa912"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.415824 4652 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.415857 4652 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.415867 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e04a533-fea2-4fde-a50b-5852129fa912-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.415883 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e04a533-fea2-4fde-a50b-5852129fa912-logs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.415895 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.415903 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e04a533-fea2-4fde-a50b-5852129fa912-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.415911 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5l58\" (UniqueName: \"kubernetes.io/projected/4e04a533-fea2-4fde-a50b-5852129fa912-kube-api-access-s5l58\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.437085 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.502637 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.517228 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-combined-ca-bundle\") pod \"2116de3c-a80b-442e-8de5-a4a8381e345e\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.517488 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-run-httpd\") pod \"2116de3c-a80b-442e-8de5-a4a8381e345e\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.517542 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg79b\" (UniqueName: \"kubernetes.io/projected/2116de3c-a80b-442e-8de5-a4a8381e345e-kube-api-access-lg79b\") pod \"2116de3c-a80b-442e-8de5-a4a8381e345e\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.517676 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-scripts\") pod \"2116de3c-a80b-442e-8de5-a4a8381e345e\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.517706 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-config-data\") pod \"2116de3c-a80b-442e-8de5-a4a8381e345e\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.517724 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-sg-core-conf-yaml\") pod \"2116de3c-a80b-442e-8de5-a4a8381e345e\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.517749 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-log-httpd\") pod \"2116de3c-a80b-442e-8de5-a4a8381e345e\" (UID: \"2116de3c-a80b-442e-8de5-a4a8381e345e\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.519368 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2116de3c-a80b-442e-8de5-a4a8381e345e" (UID: "2116de3c-a80b-442e-8de5-a4a8381e345e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.519958 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2116de3c-a80b-442e-8de5-a4a8381e345e" (UID: "2116de3c-a80b-442e-8de5-a4a8381e345e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.523377 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2116de3c-a80b-442e-8de5-a4a8381e345e-kube-api-access-lg79b" (OuterVolumeSpecName: "kube-api-access-lg79b") pod "2116de3c-a80b-442e-8de5-a4a8381e345e" (UID: "2116de3c-a80b-442e-8de5-a4a8381e345e"). InnerVolumeSpecName "kube-api-access-lg79b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.532007 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-scripts" (OuterVolumeSpecName: "scripts") pod "2116de3c-a80b-442e-8de5-a4a8381e345e" (UID: "2116de3c-a80b-442e-8de5-a4a8381e345e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.558058 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2116de3c-a80b-442e-8de5-a4a8381e345e" (UID: "2116de3c-a80b-442e-8de5-a4a8381e345e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.604629 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2116de3c-a80b-442e-8de5-a4a8381e345e" (UID: "2116de3c-a80b-442e-8de5-a4a8381e345e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.620976 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-config-data\") pod \"9f42d1f3-d588-4065-8c30-41c5309f45cb\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621021 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-logs\") pod \"9f42d1f3-d588-4065-8c30-41c5309f45cb\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621053 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-httpd-run\") pod \"9f42d1f3-d588-4065-8c30-41c5309f45cb\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621252 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"9f42d1f3-d588-4065-8c30-41c5309f45cb\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621406 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-public-tls-certs\") pod \"9f42d1f3-d588-4065-8c30-41c5309f45cb\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621464 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t79bc\" (UniqueName: \"kubernetes.io/projected/9f42d1f3-d588-4065-8c30-41c5309f45cb-kube-api-access-t79bc\") pod \"9f42d1f3-d588-4065-8c30-41c5309f45cb\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621507 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-scripts\") pod \"9f42d1f3-d588-4065-8c30-41c5309f45cb\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621548 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-combined-ca-bundle\") pod \"9f42d1f3-d588-4065-8c30-41c5309f45cb\" (UID: \"9f42d1f3-d588-4065-8c30-41c5309f45cb\") "
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621536 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-logs" (OuterVolumeSpecName: "logs") pod "9f42d1f3-d588-4065-8c30-41c5309f45cb" (UID: "9f42d1f3-d588-4065-8c30-41c5309f45cb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.621606 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9f42d1f3-d588-4065-8c30-41c5309f45cb" (UID: "9f42d1f3-d588-4065-8c30-41c5309f45cb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.622447 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.622474 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-logs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.622487 4652 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f42d1f3-d588-4065-8c30-41c5309f45cb-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.622507 4652 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.622516 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg79b\" (UniqueName: \"kubernetes.io/projected/2116de3c-a80b-442e-8de5-a4a8381e345e-kube-api-access-lg79b\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.622526 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.622536 4652 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.622545 4652 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2116de3c-a80b-442e-8de5-a4a8381e345e-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.625195 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f42d1f3-d588-4065-8c30-41c5309f45cb-kube-api-access-t79bc" (OuterVolumeSpecName: "kube-api-access-t79bc") pod "9f42d1f3-d588-4065-8c30-41c5309f45cb" (UID: "9f42d1f3-d588-4065-8c30-41c5309f45cb"). InnerVolumeSpecName "kube-api-access-t79bc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.625672 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-scripts" (OuterVolumeSpecName: "scripts") pod "9f42d1f3-d588-4065-8c30-41c5309f45cb" (UID: "9f42d1f3-d588-4065-8c30-41c5309f45cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.627707 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "9f42d1f3-d588-4065-8c30-41c5309f45cb" (UID: "9f42d1f3-d588-4065-8c30-41c5309f45cb"). InnerVolumeSpecName "local-storage02-crc".
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.633253 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-config-data" (OuterVolumeSpecName: "config-data") pod "2116de3c-a80b-442e-8de5-a4a8381e345e" (UID: "2116de3c-a80b-442e-8de5-a4a8381e345e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.648051 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f42d1f3-d588-4065-8c30-41c5309f45cb" (UID: "9f42d1f3-d588-4065-8c30-41c5309f45cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.667192 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-config-data" (OuterVolumeSpecName: "config-data") pod "9f42d1f3-d588-4065-8c30-41c5309f45cb" (UID: "9f42d1f3-d588-4065-8c30-41c5309f45cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.675093 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9f42d1f3-d588-4065-8c30-41c5309f45cb" (UID: "9f42d1f3-d588-4065-8c30-41c5309f45cb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.727442 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.727484 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2116de3c-a80b-442e-8de5-a4a8381e345e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.727506 4652 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.727520 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t79bc\" (UniqueName: \"kubernetes.io/projected/9f42d1f3-d588-4065-8c30-41c5309f45cb-kube-api-access-t79bc\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.727531 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.727541 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.727566 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9f42d1f3-d588-4065-8c30-41c5309f45cb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.747624 4652 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.758513 4652 generic.go:334] "Generic (PLEG): container finished" podID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerID="67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee" exitCode=0 Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.758607 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f42d1f3-d588-4065-8c30-41c5309f45cb","Type":"ContainerDied","Data":"67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee"} Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.758636 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f42d1f3-d588-4065-8c30-41c5309f45cb","Type":"ContainerDied","Data":"923c2729f6783933dc99cadd16d54e26c05c323ce357f5478ce7798fd8b6852e"} Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.758656 4652 scope.go:117] "RemoveContainer" containerID="67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.758783 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.769574 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2116de3c-a80b-442e-8de5-a4a8381e345e","Type":"ContainerDied","Data":"422e021ff3124358e7a568e0fb33a941656a158cfb2007cc1da631ef79333426"} Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.770258 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.772984 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7ee40e5d-1fe1-4d60-ac89-85beb2755efa","Type":"ContainerStarted","Data":"97a9a9a76cdf4dcca237890db148295de3f41a5b66c84493dc811bf5000c34c0"} Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.779966 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7744f5c9d6-t75mf" event={"ID":"4e04a533-fea2-4fde-a50b-5852129fa912","Type":"ContainerDied","Data":"66675ee0f6ef1f0335f425f873080723b61535259df2b40a7e2022fbbc77a373"} Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.780875 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7744f5c9d6-t75mf" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.795972 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.697390762 podStartE2EDuration="14.795962047s" podCreationTimestamp="2025-12-05 05:45:22 +0000 UTC" firstStartedPulling="2025-12-05 05:45:22.920489435 +0000 UTC m=+1125.157219702" lastFinishedPulling="2025-12-05 05:45:36.01906072 +0000 UTC m=+1138.255790987" observedRunningTime="2025-12-05 05:45:36.79402654 +0000 UTC m=+1139.030756807" watchObservedRunningTime="2025-12-05 05:45:36.795962047 +0000 UTC m=+1139.032692315" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.819990 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.832147 4652 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.840776 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.853662 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.871281 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.871842 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-httpd" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.871862 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-httpd" Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.871882 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon-log" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.871888 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon-log" Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.871900 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="ceilometer-central-agent" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.871906 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="ceilometer-central-agent" Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.871922 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="proxy-httpd" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.871928 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="proxy-httpd" Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.871938 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="sg-core" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.871943 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="sg-core" Dec 05 05:45:36 crc 
kubenswrapper[4652]: E1205 05:45:36.871968 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.871975 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon" Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.871987 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-log" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.871992 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-log" Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.872006 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="ceilometer-notification-agent" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872011 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="ceilometer-notification-agent" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872198 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon-log" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872213 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-log" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872222 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="ceilometer-notification-agent" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872234 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="ceilometer-central-agent" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872246 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="sg-core" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872257 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" containerName="glance-httpd" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872269 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" containerName="proxy-httpd" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.872278 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" containerName="horizon" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.873419 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.876924 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.877210 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.883848 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.885410 4652 scope.go:117] "RemoveContainer" containerID="a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.892591 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7744f5c9d6-t75mf"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.899686 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7744f5c9d6-t75mf"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.907605 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.913983 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.916686 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.918000 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.920036 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.921695 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.938101 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eac02c10-37cb-4ddc-9b04-010c4fe70817-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.938136 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.938254 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.938300 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvctj\" (UniqueName: \"kubernetes.io/projected/eac02c10-37cb-4ddc-9b04-010c4fe70817-kube-api-access-fvctj\") pod 
\"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.938360 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.938461 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-scripts\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.938486 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eac02c10-37cb-4ddc-9b04-010c4fe70817-logs\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.938784 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-config-data\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.975899 4652 scope.go:117] "RemoveContainer" containerID="67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee" Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.979400 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee\": container with ID starting with 67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee not found: ID does not exist" containerID="67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.979432 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee"} err="failed to get container status \"67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee\": rpc error: code = NotFound desc = could not find container \"67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee\": container with ID starting with 67f127104f2768a68cedd8efe536bf79fd23363a409009e7326da5ca9b703cee not found: ID does not exist" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.979451 4652 scope.go:117] "RemoveContainer" containerID="a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33" Dec 05 05:45:36 crc kubenswrapper[4652]: E1205 05:45:36.979991 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33\": container with ID starting with a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33 not found: ID does not exist" 
containerID="a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.980015 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33"} err="failed to get container status \"a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33\": rpc error: code = NotFound desc = could not find container \"a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33\": container with ID starting with a086c7376486461da2abb199582d6771b4773a985d6973b65cb1ffa4f79a2b33 not found: ID does not exist" Dec 05 05:45:36 crc kubenswrapper[4652]: I1205 05:45:36.980028 4652 scope.go:117] "RemoveContainer" containerID="86e9bba08c721073d2815381c8bd95aaee03903d813502660b41c0c11281c16a" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.021903 4652 scope.go:117] "RemoveContainer" containerID="54680cdff8ca78812d3cf4eb177d8f01974aaf80fbe6e8217a3959f8479aa127" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041714 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-scripts\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041755 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eac02c10-37cb-4ddc-9b04-010c4fe70817-logs\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041798 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-config-data\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041831 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-scripts\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041860 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-log-httpd\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041892 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eac02c10-37cb-4ddc-9b04-010c4fe70817-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041913 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: 
\"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041935 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041951 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q44c7\" (UniqueName: \"kubernetes.io/projected/55caee20-77b3-4137-b11a-60ed9aaf2de3-kube-api-access-q44c7\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041971 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.041996 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-run-httpd\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.042025 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.042051 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvctj\" (UniqueName: \"kubernetes.io/projected/eac02c10-37cb-4ddc-9b04-010c4fe70817-kube-api-access-fvctj\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.042067 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-config-data\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.042104 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.043108 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eac02c10-37cb-4ddc-9b04-010c4fe70817-logs\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc 
kubenswrapper[4652]: I1205 05:45:37.044473 4652 scope.go:117] "RemoveContainer" containerID="d0032b87c6047940ed678dba19916daf8c432d2d1847756f11978349177d599f" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.044680 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.044777 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eac02c10-37cb-4ddc-9b04-010c4fe70817-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.050588 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-config-data\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.051095 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-scripts\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.062048 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.062530 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eac02c10-37cb-4ddc-9b04-010c4fe70817-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.064624 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvctj\" (UniqueName: \"kubernetes.io/projected/eac02c10-37cb-4ddc-9b04-010c4fe70817-kube-api-access-fvctj\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.076183 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"eac02c10-37cb-4ddc-9b04-010c4fe70817\") " pod="openstack/glance-default-external-api-0" Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.146544 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-config-data\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0" 
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.146732 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-scripts\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.146764 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-log-httpd\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.146802 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.146825 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q44c7\" (UniqueName: \"kubernetes.io/projected/55caee20-77b3-4137-b11a-60ed9aaf2de3-kube-api-access-q44c7\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.146854 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.146877 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-run-httpd\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.147720 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-log-httpd\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.147757 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-run-httpd\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.150662 4652 scope.go:117] "RemoveContainer" containerID="b8643dfc6fc892f522bf658687a20f9955c8af627137253df9d385b0fb51e7f9"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.151749 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-scripts\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.163857 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.165078 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.165297 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q44c7\" (UniqueName: \"kubernetes.io/projected/55caee20-77b3-4137-b11a-60ed9aaf2de3-kube-api-access-q44c7\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.175629 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-config-data\") pod \"ceilometer-0\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.204417 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.265438 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.266398 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.288641 4652 scope.go:117] "RemoveContainer" containerID="7c9d3fec70cacda0b535157a56f5d7fa1a3c1cedf136c200c7a49e146e79f017"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.492864 4652 scope.go:117] "RemoveContainer" containerID="1a97cb99478e8e1f70f698c3e507c2e8e7e789636755f4bc50bd106c55435730"
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.780713 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.789967 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.809465 4652 generic.go:334] "Generic (PLEG): container finished" podID="4dccdfc3-432a-459c-a940-1d167d41405b" containerID="86e31d072f4e3486b357fe0bf2c9aadcf2e4861cdcc0f2e2ec938a7319aca2df" exitCode=0
Dec 05 05:45:37 crc kubenswrapper[4652]: I1205 05:45:37.809811 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4dccdfc3-432a-459c-a940-1d167d41405b","Type":"ContainerDied","Data":"86e31d072f4e3486b357fe0bf2c9aadcf2e4861cdcc0f2e2ec938a7319aca2df"}
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.132831 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.143788 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2116de3c-a80b-442e-8de5-a4a8381e345e" path="/var/lib/kubelet/pods/2116de3c-a80b-442e-8de5-a4a8381e345e/volumes"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.146714 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e04a533-fea2-4fde-a50b-5852129fa912" path="/var/lib/kubelet/pods/4e04a533-fea2-4fde-a50b-5852129fa912/volumes"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.149384 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f42d1f3-d588-4065-8c30-41c5309f45cb" path="/var/lib/kubelet/pods/9f42d1f3-d588-4065-8c30-41c5309f45cb/volumes"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.186461 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-logs\") pod \"4dccdfc3-432a-459c-a940-1d167d41405b\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.186520 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-combined-ca-bundle\") pod \"4dccdfc3-432a-459c-a940-1d167d41405b\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.186613 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-config-data\") pod \"4dccdfc3-432a-459c-a940-1d167d41405b\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.186631 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-httpd-run\") pod \"4dccdfc3-432a-459c-a940-1d167d41405b\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.186657 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl4rv\" (UniqueName: \"kubernetes.io/projected/4dccdfc3-432a-459c-a940-1d167d41405b-kube-api-access-cl4rv\") pod \"4dccdfc3-432a-459c-a940-1d167d41405b\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.186718 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-scripts\") pod \"4dccdfc3-432a-459c-a940-1d167d41405b\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.186739 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-internal-tls-certs\") pod \"4dccdfc3-432a-459c-a940-1d167d41405b\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.186771 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"4dccdfc3-432a-459c-a940-1d167d41405b\" (UID: \"4dccdfc3-432a-459c-a940-1d167d41405b\") "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.188156 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4dccdfc3-432a-459c-a940-1d167d41405b" (UID: "4dccdfc3-432a-459c-a940-1d167d41405b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.188393 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-logs" (OuterVolumeSpecName: "logs") pod "4dccdfc3-432a-459c-a940-1d167d41405b" (UID: "4dccdfc3-432a-459c-a940-1d167d41405b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.195077 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dccdfc3-432a-459c-a940-1d167d41405b-kube-api-access-cl4rv" (OuterVolumeSpecName: "kube-api-access-cl4rv") pod "4dccdfc3-432a-459c-a940-1d167d41405b" (UID: "4dccdfc3-432a-459c-a940-1d167d41405b"). InnerVolumeSpecName "kube-api-access-cl4rv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.199086 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-scripts" (OuterVolumeSpecName: "scripts") pod "4dccdfc3-432a-459c-a940-1d167d41405b" (UID: "4dccdfc3-432a-459c-a940-1d167d41405b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.216876 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "4dccdfc3-432a-459c-a940-1d167d41405b" (UID: "4dccdfc3-432a-459c-a940-1d167d41405b"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.260854 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4dccdfc3-432a-459c-a940-1d167d41405b" (UID: "4dccdfc3-432a-459c-a940-1d167d41405b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.284649 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-config-data" (OuterVolumeSpecName: "config-data") pod "4dccdfc3-432a-459c-a940-1d167d41405b" (UID: "4dccdfc3-432a-459c-a940-1d167d41405b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.291691 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-logs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.291758 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.291776 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.291789 4652 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4dccdfc3-432a-459c-a940-1d167d41405b-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.291803 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl4rv\" (UniqueName: \"kubernetes.io/projected/4dccdfc3-432a-459c-a940-1d167d41405b-kube-api-access-cl4rv\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.291814 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.291858 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.298039 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4dccdfc3-432a-459c-a940-1d167d41405b" (UID: "4dccdfc3-432a-459c-a940-1d167d41405b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.321376 4652 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.393197 4652 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dccdfc3-432a-459c-a940-1d167d41405b-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.393222 4652 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.824618 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerStarted","Data":"bc88d5e8c6e378f7a9619ed0fd758f83b88003b50e3db657095be993a46eaf01"}
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.824972 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerStarted","Data":"eed850865a72e56cd120b42be226bb3bcf83ee807b3ec8960cdf94c64685a424"}
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.828748 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4dccdfc3-432a-459c-a940-1d167d41405b","Type":"ContainerDied","Data":"98a57cdf0db2b8e3448ea1972ec204cccd4ee9e37eb961bd91c82add6bc00147"}
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.828787 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.828804 4652 scope.go:117] "RemoveContainer" containerID="86e31d072f4e3486b357fe0bf2c9aadcf2e4861cdcc0f2e2ec938a7319aca2df"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.833120 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eac02c10-37cb-4ddc-9b04-010c4fe70817","Type":"ContainerStarted","Data":"eda7853bdc2b9b281deb905c3a8e095e71931569184d0eb902a0e6bea2495c3a"}
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.833149 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eac02c10-37cb-4ddc-9b04-010c4fe70817","Type":"ContainerStarted","Data":"47382c938ff133025b49594999ab603d740f9fe588c683c19ed45f82b5fa44ff"}
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.865625 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.865919 4652 scope.go:117] "RemoveContainer" containerID="8792eb6cec75227540052c82010fe2cb97fbbdd431c72ef97e27a65613474b8b"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.886015 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.892944 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 05:45:38 crc kubenswrapper[4652]: E1205 05:45:38.893609 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-httpd"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.893684 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-httpd"
Dec 05 05:45:38 crc kubenswrapper[4652]: E1205 05:45:38.893766 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-log"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.893825 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-log"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.894057 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-log"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.894117 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" containerName="glance-httpd"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.895147 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.897775 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.909269 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 05 05:45:38 crc kubenswrapper[4652]: I1205 05:45:38.909880 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.012830 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.012871 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-config-data\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.012895 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.012922 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-logs\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.012936 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.012973 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.013046 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-scripts\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.013063 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr8dw\" (UniqueName: \"kubernetes.io/projected/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-kube-api-access-wr8dw\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.114474 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-config-data\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.114528 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.114602 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-logs\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.114624 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.114693 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.114841 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-scripts\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.114868 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr8dw\" (UniqueName: \"kubernetes.io/projected/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-kube-api-access-wr8dw\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.114979 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.115224 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.133040 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.133173 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.133341 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.133429 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-logs\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.138617 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-scripts\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.153577 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-config-data\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.169329 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr8dw\" (UniqueName: \"kubernetes.io/projected/58c5b24e-27a5-4bd8-adde-5bc3d97aca80-kube-api-access-wr8dw\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.183606 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"58c5b24e-27a5-4bd8-adde-5bc3d97aca80\") " pod="openstack/glance-default-internal-api-0"
Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.290721 4652 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.767675 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 05:45:39 crc kubenswrapper[4652]: W1205 05:45:39.777912 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58c5b24e_27a5_4bd8_adde_5bc3d97aca80.slice/crio-e18d963fbe102e6cde2696946f99dccd8cb9eae9ef684a280111956b64c9211d WatchSource:0}: Error finding container e18d963fbe102e6cde2696946f99dccd8cb9eae9ef684a280111956b64c9211d: Status 404 returned error can't find the container with id e18d963fbe102e6cde2696946f99dccd8cb9eae9ef684a280111956b64c9211d Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.845176 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"58c5b24e-27a5-4bd8-adde-5bc3d97aca80","Type":"ContainerStarted","Data":"e18d963fbe102e6cde2696946f99dccd8cb9eae9ef684a280111956b64c9211d"} Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.847119 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eac02c10-37cb-4ddc-9b04-010c4fe70817","Type":"ContainerStarted","Data":"f6736f61745f99331e6458024813078bbd05e0b9ea2a27754b3ed24814a70a99"} Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.848946 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerStarted","Data":"c061ebdb7493e7998ef455a94de2d0f6d8691acfd6e19997517bfec03bd64575"} Dec 05 05:45:39 crc kubenswrapper[4652]: I1205 05:45:39.867623 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.867608336 podStartE2EDuration="3.867608336s" podCreationTimestamp="2025-12-05 05:45:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:39.861062675 +0000 UTC m=+1142.097792942" watchObservedRunningTime="2025-12-05 05:45:39.867608336 +0000 UTC m=+1142.104338603" Dec 05 05:45:40 crc kubenswrapper[4652]: I1205 05:45:40.139099 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dccdfc3-432a-459c-a940-1d167d41405b" path="/var/lib/kubelet/pods/4dccdfc3-432a-459c-a940-1d167d41405b/volumes" Dec 05 05:45:40 crc kubenswrapper[4652]: I1205 05:45:40.861357 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerStarted","Data":"b285bfc0334d57d03ed78443223c0646f0c3f36905a64a8503e1c16770b2d65f"} Dec 05 05:45:40 crc kubenswrapper[4652]: I1205 05:45:40.864280 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"58c5b24e-27a5-4bd8-adde-5bc3d97aca80","Type":"ContainerStarted","Data":"8ee52cfdd42ade67bc359ecf3e60597c22f3d9e4f9418c8ea7ea381d0ad2db80"} Dec 05 05:45:40 crc kubenswrapper[4652]: I1205 05:45:40.864325 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"58c5b24e-27a5-4bd8-adde-5bc3d97aca80","Type":"ContainerStarted","Data":"c7d6fe4b34a391f61dbf125491ac9f55af824c24bb113ded002272e238fa2231"} Dec 05 05:45:40 crc kubenswrapper[4652]: I1205 05:45:40.891877 4652 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.891857592 podStartE2EDuration="2.891857592s" podCreationTimestamp="2025-12-05 05:45:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:40.879497403 +0000 UTC m=+1143.116227670" watchObservedRunningTime="2025-12-05 05:45:40.891857592 +0000 UTC m=+1143.128587858" Dec 05 05:45:41 crc kubenswrapper[4652]: I1205 05:45:41.891985 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerStarted","Data":"05d74307cd9595669de36c812efef3f3f2e87b9c5012a419eefc52b67be8cf02"} Dec 05 05:45:41 crc kubenswrapper[4652]: I1205 05:45:41.892298 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="ceilometer-central-agent" containerID="cri-o://bc88d5e8c6e378f7a9619ed0fd758f83b88003b50e3db657095be993a46eaf01" gracePeriod=30 Dec 05 05:45:41 crc kubenswrapper[4652]: I1205 05:45:41.892780 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="proxy-httpd" containerID="cri-o://05d74307cd9595669de36c812efef3f3f2e87b9c5012a419eefc52b67be8cf02" gracePeriod=30 Dec 05 05:45:41 crc kubenswrapper[4652]: I1205 05:45:41.892798 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="sg-core" containerID="cri-o://b285bfc0334d57d03ed78443223c0646f0c3f36905a64a8503e1c16770b2d65f" gracePeriod=30 Dec 05 05:45:41 crc kubenswrapper[4652]: I1205 05:45:41.892812 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="ceilometer-notification-agent" containerID="cri-o://c061ebdb7493e7998ef455a94de2d0f6d8691acfd6e19997517bfec03bd64575" gracePeriod=30 Dec 05 05:45:41 crc kubenswrapper[4652]: I1205 05:45:41.917892 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.274461944 podStartE2EDuration="5.917872426s" podCreationTimestamp="2025-12-05 05:45:36 +0000 UTC" firstStartedPulling="2025-12-05 05:45:37.819045742 +0000 UTC m=+1140.055776010" lastFinishedPulling="2025-12-05 05:45:41.462456235 +0000 UTC m=+1143.699186492" observedRunningTime="2025-12-05 05:45:41.911496854 +0000 UTC m=+1144.148227121" watchObservedRunningTime="2025-12-05 05:45:41.917872426 +0000 UTC m=+1144.154602693" Dec 05 05:45:42 crc kubenswrapper[4652]: E1205 05:45:42.045671 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55caee20_77b3_4137_b11a_60ed9aaf2de3.slice/crio-b285bfc0334d57d03ed78443223c0646f0c3f36905a64a8503e1c16770b2d65f.scope\": RecentStats: unable to find data in memory cache]" Dec 05 05:45:42 crc kubenswrapper[4652]: I1205 05:45:42.904528 4652 generic.go:334] "Generic (PLEG): container finished" podID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerID="05d74307cd9595669de36c812efef3f3f2e87b9c5012a419eefc52b67be8cf02" exitCode=0 Dec 05 05:45:42 crc kubenswrapper[4652]: I1205 05:45:42.904575 4652 
generic.go:334] "Generic (PLEG): container finished" podID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerID="b285bfc0334d57d03ed78443223c0646f0c3f36905a64a8503e1c16770b2d65f" exitCode=2 Dec 05 05:45:42 crc kubenswrapper[4652]: I1205 05:45:42.904584 4652 generic.go:334] "Generic (PLEG): container finished" podID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerID="c061ebdb7493e7998ef455a94de2d0f6d8691acfd6e19997517bfec03bd64575" exitCode=0 Dec 05 05:45:42 crc kubenswrapper[4652]: I1205 05:45:42.904590 4652 generic.go:334] "Generic (PLEG): container finished" podID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerID="bc88d5e8c6e378f7a9619ed0fd758f83b88003b50e3db657095be993a46eaf01" exitCode=0 Dec 05 05:45:42 crc kubenswrapper[4652]: I1205 05:45:42.904613 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerDied","Data":"05d74307cd9595669de36c812efef3f3f2e87b9c5012a419eefc52b67be8cf02"} Dec 05 05:45:42 crc kubenswrapper[4652]: I1205 05:45:42.904639 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerDied","Data":"b285bfc0334d57d03ed78443223c0646f0c3f36905a64a8503e1c16770b2d65f"} Dec 05 05:45:42 crc kubenswrapper[4652]: I1205 05:45:42.904649 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerDied","Data":"c061ebdb7493e7998ef455a94de2d0f6d8691acfd6e19997517bfec03bd64575"} Dec 05 05:45:42 crc kubenswrapper[4652]: I1205 05:45:42.904659 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerDied","Data":"bc88d5e8c6e378f7a9619ed0fd758f83b88003b50e3db657095be993a46eaf01"} Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.832049 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.845177 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-sg-core-conf-yaml\") pod \"55caee20-77b3-4137-b11a-60ed9aaf2de3\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.845292 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q44c7\" (UniqueName: \"kubernetes.io/projected/55caee20-77b3-4137-b11a-60ed9aaf2de3-kube-api-access-q44c7\") pod \"55caee20-77b3-4137-b11a-60ed9aaf2de3\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.845354 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-combined-ca-bundle\") pod \"55caee20-77b3-4137-b11a-60ed9aaf2de3\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.845375 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-scripts\") pod \"55caee20-77b3-4137-b11a-60ed9aaf2de3\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.845434 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-run-httpd\") pod \"55caee20-77b3-4137-b11a-60ed9aaf2de3\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.846040 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "55caee20-77b3-4137-b11a-60ed9aaf2de3" (UID: "55caee20-77b3-4137-b11a-60ed9aaf2de3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.846967 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-config-data\") pod \"55caee20-77b3-4137-b11a-60ed9aaf2de3\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.847007 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-log-httpd\") pod \"55caee20-77b3-4137-b11a-60ed9aaf2de3\" (UID: \"55caee20-77b3-4137-b11a-60ed9aaf2de3\") " Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.847415 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "55caee20-77b3-4137-b11a-60ed9aaf2de3" (UID: "55caee20-77b3-4137-b11a-60ed9aaf2de3"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.847992 4652 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.848010 4652 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/55caee20-77b3-4137-b11a-60ed9aaf2de3-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.850661 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55caee20-77b3-4137-b11a-60ed9aaf2de3-kube-api-access-q44c7" (OuterVolumeSpecName: "kube-api-access-q44c7") pod "55caee20-77b3-4137-b11a-60ed9aaf2de3" (UID: "55caee20-77b3-4137-b11a-60ed9aaf2de3"). InnerVolumeSpecName "kube-api-access-q44c7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.856190 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-scripts" (OuterVolumeSpecName: "scripts") pod "55caee20-77b3-4137-b11a-60ed9aaf2de3" (UID: "55caee20-77b3-4137-b11a-60ed9aaf2de3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.869497 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "55caee20-77b3-4137-b11a-60ed9aaf2de3" (UID: "55caee20-77b3-4137-b11a-60ed9aaf2de3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.902740 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55caee20-77b3-4137-b11a-60ed9aaf2de3" (UID: "55caee20-77b3-4137-b11a-60ed9aaf2de3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.926678 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-config-data" (OuterVolumeSpecName: "config-data") pod "55caee20-77b3-4137-b11a-60ed9aaf2de3" (UID: "55caee20-77b3-4137-b11a-60ed9aaf2de3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.930059 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6cmts" event={"ID":"3516f65d-a205-4768-b345-cee580a16b09","Type":"ContainerStarted","Data":"8f18516a28495baf1bacb0f34032551481833faf5ab6afce86722d2ada2c23dd"} Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.932940 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"55caee20-77b3-4137-b11a-60ed9aaf2de3","Type":"ContainerDied","Data":"eed850865a72e56cd120b42be226bb3bcf83ee807b3ec8960cdf94c64685a424"} Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.932981 4652 scope.go:117] "RemoveContainer" containerID="05d74307cd9595669de36c812efef3f3f2e87b9c5012a419eefc52b67be8cf02" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.933097 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.950404 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-6cmts" podStartSLOduration=2.064494501 podStartE2EDuration="16.950388201s" podCreationTimestamp="2025-12-05 05:45:29 +0000 UTC" firstStartedPulling="2025-12-05 05:45:30.761695953 +0000 UTC m=+1132.998426220" lastFinishedPulling="2025-12-05 05:45:45.647589654 +0000 UTC m=+1147.884319920" observedRunningTime="2025-12-05 05:45:45.942706305 +0000 UTC m=+1148.179436562" watchObservedRunningTime="2025-12-05 05:45:45.950388201 +0000 UTC m=+1148.187118468" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.951477 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.951524 4652 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.951536 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q44c7\" (UniqueName: \"kubernetes.io/projected/55caee20-77b3-4137-b11a-60ed9aaf2de3-kube-api-access-q44c7\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.951545 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.951566 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55caee20-77b3-4137-b11a-60ed9aaf2de3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.952916 4652 scope.go:117] "RemoveContainer" containerID="b285bfc0334d57d03ed78443223c0646f0c3f36905a64a8503e1c16770b2d65f" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.962808 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.968130 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.980122 4652 scope.go:117] 
"RemoveContainer" containerID="c061ebdb7493e7998ef455a94de2d0f6d8691acfd6e19997517bfec03bd64575" Dec 05 05:45:45 crc kubenswrapper[4652]: I1205 05:45:45.999325 4652 scope.go:117] "RemoveContainer" containerID="bc88d5e8c6e378f7a9619ed0fd758f83b88003b50e3db657095be993a46eaf01" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.013922 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:46 crc kubenswrapper[4652]: E1205 05:45:46.015712 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="ceilometer-notification-agent" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.015752 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="ceilometer-notification-agent" Dec 05 05:45:46 crc kubenswrapper[4652]: E1205 05:45:46.015780 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="ceilometer-central-agent" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.015787 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="ceilometer-central-agent" Dec 05 05:45:46 crc kubenswrapper[4652]: E1205 05:45:46.015827 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="sg-core" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.015834 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="sg-core" Dec 05 05:45:46 crc kubenswrapper[4652]: E1205 05:45:46.015848 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="proxy-httpd" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.015853 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="proxy-httpd" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.016586 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="proxy-httpd" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.016612 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="ceilometer-central-agent" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.016662 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="ceilometer-notification-agent" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.016671 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" containerName="sg-core" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.021186 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.023843 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.023848 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.025525 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.053186 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-run-httpd\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.053235 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx98h\" (UniqueName: \"kubernetes.io/projected/a3dc9f7a-1d96-4943-995e-9765861a32c0-kube-api-access-sx98h\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.053335 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-log-httpd\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.053375 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-config-data\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.053440 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-scripts\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.053508 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.053531 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.133688 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55caee20-77b3-4137-b11a-60ed9aaf2de3" path="/var/lib/kubelet/pods/55caee20-77b3-4137-b11a-60ed9aaf2de3/volumes" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.154958 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-scripts\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.154995 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.155019 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.155511 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-run-httpd\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.155545 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx98h\" (UniqueName: \"kubernetes.io/projected/a3dc9f7a-1d96-4943-995e-9765861a32c0-kube-api-access-sx98h\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.155724 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-log-httpd\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.155842 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-config-data\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.155954 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-run-httpd\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.156089 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-log-httpd\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.158234 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.158943 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-scripts\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.159275 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-config-data\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.159346 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.168747 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx98h\" (UniqueName: \"kubernetes.io/projected/a3dc9f7a-1d96-4943-995e-9765861a32c0-kube-api-access-sx98h\") pod \"ceilometer-0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") " pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.337630 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.750508 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:46 crc kubenswrapper[4652]: I1205 05:45:46.951700 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerStarted","Data":"45f3238abc0261dcee226cfa42cc7ba6c566d452da0cfbfc2ef19d8dda577630"} Dec 05 05:45:47 crc kubenswrapper[4652]: I1205 05:45:47.205163 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 05:45:47 crc kubenswrapper[4652]: I1205 05:45:47.205386 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 05:45:47 crc kubenswrapper[4652]: I1205 05:45:47.229461 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 05:45:47 crc kubenswrapper[4652]: I1205 05:45:47.235879 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 05:45:47 crc kubenswrapper[4652]: I1205 05:45:47.960375 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerStarted","Data":"65bcbe4b093392356997e024d7c837faccfc88c829ef905a59a8865820a2588b"} Dec 05 05:45:47 crc kubenswrapper[4652]: I1205 05:45:47.960704 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 05:45:47 crc kubenswrapper[4652]: I1205 05:45:47.960720 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 05:45:48 crc kubenswrapper[4652]: I1205 05:45:48.968888 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerStarted","Data":"a4477c831f463d3d879e9f718ce33ab5a312877b9173b72af7838aee926dd637"} Dec 05 05:45:49 crc 
kubenswrapper[4652]: I1205 05:45:49.291218 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:49 crc kubenswrapper[4652]: I1205 05:45:49.291681 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:49 crc kubenswrapper[4652]: I1205 05:45:49.346196 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:49 crc kubenswrapper[4652]: I1205 05:45:49.351979 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:49 crc kubenswrapper[4652]: I1205 05:45:49.582106 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 05:45:49 crc kubenswrapper[4652]: I1205 05:45:49.625566 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 05:45:49 crc kubenswrapper[4652]: I1205 05:45:49.977654 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:49 crc kubenswrapper[4652]: I1205 05:45:49.978578 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:50 crc kubenswrapper[4652]: I1205 05:45:50.677514 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:45:50 crc kubenswrapper[4652]: I1205 05:45:50.678055 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" containerID="cri-o://b83ad4af98342e82451a246dbb5c9c80a4ca4ce922da71c27da3b2c387289f0d" gracePeriod=30 Dec 05 05:45:51 crc kubenswrapper[4652]: I1205 05:45:51.654055 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:51 crc kubenswrapper[4652]: I1205 05:45:51.674012 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 05:45:51 crc kubenswrapper[4652]: I1205 05:45:51.995739 4652 generic.go:334] "Generic (PLEG): container finished" podID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerID="b83ad4af98342e82451a246dbb5c9c80a4ca4ce922da71c27da3b2c387289f0d" exitCode=0 Dec 05 05:45:51 crc kubenswrapper[4652]: I1205 05:45:51.995817 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerDied","Data":"b83ad4af98342e82451a246dbb5c9c80a4ca4ce922da71c27da3b2c387289f0d"} Dec 05 05:45:51 crc kubenswrapper[4652]: I1205 05:45:51.995861 4652 scope.go:117] "RemoveContainer" containerID="7d19c23409283640cb1bd3a0d34c8d831faf4762b4108458c1b8b95e16face3c" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.028505 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.506299 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.589226 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-combined-ca-bundle\") pod \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.589269 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca914a04-6a6f-4b20-af32-e0771a7dffa5-logs\") pod \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.589411 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-config-data\") pod \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.589433 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-custom-prometheus-ca\") pod \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.589487 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjlf5\" (UniqueName: \"kubernetes.io/projected/ca914a04-6a6f-4b20-af32-e0771a7dffa5-kube-api-access-sjlf5\") pod \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\" (UID: \"ca914a04-6a6f-4b20-af32-e0771a7dffa5\") " Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.589980 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca914a04-6a6f-4b20-af32-e0771a7dffa5-logs" (OuterVolumeSpecName: "logs") pod "ca914a04-6a6f-4b20-af32-e0771a7dffa5" (UID: "ca914a04-6a6f-4b20-af32-e0771a7dffa5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.592866 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca914a04-6a6f-4b20-af32-e0771a7dffa5-kube-api-access-sjlf5" (OuterVolumeSpecName: "kube-api-access-sjlf5") pod "ca914a04-6a6f-4b20-af32-e0771a7dffa5" (UID: "ca914a04-6a6f-4b20-af32-e0771a7dffa5"). InnerVolumeSpecName "kube-api-access-sjlf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.610748 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "ca914a04-6a6f-4b20-af32-e0771a7dffa5" (UID: "ca914a04-6a6f-4b20-af32-e0771a7dffa5"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.611941 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca914a04-6a6f-4b20-af32-e0771a7dffa5" (UID: "ca914a04-6a6f-4b20-af32-e0771a7dffa5"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.628675 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-config-data" (OuterVolumeSpecName: "config-data") pod "ca914a04-6a6f-4b20-af32-e0771a7dffa5" (UID: "ca914a04-6a6f-4b20-af32-e0771a7dffa5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.692270 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjlf5\" (UniqueName: \"kubernetes.io/projected/ca914a04-6a6f-4b20-af32-e0771a7dffa5-kube-api-access-sjlf5\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.692296 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.692307 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca914a04-6a6f-4b20-af32-e0771a7dffa5-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.692315 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:52 crc kubenswrapper[4652]: I1205 05:45:52.692322 4652 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/ca914a04-6a6f-4b20-af32-e0771a7dffa5-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.005417 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.005414 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"ca914a04-6a6f-4b20-af32-e0771a7dffa5","Type":"ContainerDied","Data":"f52fa9fa118a728aa94d83e2dcd0e592d628993e3ef1b5cfa0bb2c69dd697693"} Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.005833 4652 scope.go:117] "RemoveContainer" containerID="b83ad4af98342e82451a246dbb5c9c80a4ca4ce922da71c27da3b2c387289f0d" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.030280 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerStarted","Data":"458fdd5791d5738328580b67ef5993b1b7451eec2ac452b0d6c30375d9840177"} Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.044955 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.064122 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.083587 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:45:53 crc kubenswrapper[4652]: E1205 05:45:53.084020 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.084038 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: E1205 05:45:53.084047 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.084058 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: E1205 05:45:53.084073 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.084079 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.084292 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.084321 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.084329 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.085052 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.086104 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.091439 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.203023 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6lgh\" (UniqueName: \"kubernetes.io/projected/077f273d-5e79-49f7-a780-059832c13655-kube-api-access-l6lgh\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.203063 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.203203 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/077f273d-5e79-49f7-a780-059832c13655-logs\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.203263 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.203423 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-config-data\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.306305 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6lgh\" (UniqueName: \"kubernetes.io/projected/077f273d-5e79-49f7-a780-059832c13655-kube-api-access-l6lgh\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.306422 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.306534 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/077f273d-5e79-49f7-a780-059832c13655-logs\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " 
pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.306695 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.306804 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-config-data\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.306864 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/077f273d-5e79-49f7-a780-059832c13655-logs\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.310342 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.310538 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.313101 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/077f273d-5e79-49f7-a780-059832c13655-config-data\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.322757 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6lgh\" (UniqueName: \"kubernetes.io/projected/077f273d-5e79-49f7-a780-059832c13655-kube-api-access-l6lgh\") pod \"watcher-decision-engine-0\" (UID: \"077f273d-5e79-49f7-a780-059832c13655\") " pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.402845 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Dec 05 05:45:53 crc kubenswrapper[4652]: I1205 05:45:53.827121 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Dec 05 05:45:53 crc kubenswrapper[4652]: W1205 05:45:53.828652 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod077f273d_5e79_49f7_a780_059832c13655.slice/crio-da9974729f02cbd6852bbd4f3c85bdf75d8a2a5facdd2a0a309f82ae128d6d85 WatchSource:0}: Error finding container da9974729f02cbd6852bbd4f3c85bdf75d8a2a5facdd2a0a309f82ae128d6d85: Status 404 returned error can't find the container with id da9974729f02cbd6852bbd4f3c85bdf75d8a2a5facdd2a0a309f82ae128d6d85 Dec 05 05:45:54 crc kubenswrapper[4652]: I1205 05:45:54.038940 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"077f273d-5e79-49f7-a780-059832c13655","Type":"ContainerStarted","Data":"4ef31ab248169b42d3d2c1387dc9b18c18d073c3d5e0f13b86176e5e245bcf51"} Dec 05 05:45:54 crc kubenswrapper[4652]: I1205 05:45:54.039723 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"077f273d-5e79-49f7-a780-059832c13655","Type":"ContainerStarted","Data":"da9974729f02cbd6852bbd4f3c85bdf75d8a2a5facdd2a0a309f82ae128d6d85"} Dec 05 05:45:54 crc kubenswrapper[4652]: I1205 05:45:54.043861 4652 generic.go:334] "Generic (PLEG): container finished" podID="3516f65d-a205-4768-b345-cee580a16b09" containerID="8f18516a28495baf1bacb0f34032551481833faf5ab6afce86722d2ada2c23dd" exitCode=0 Dec 05 05:45:54 crc kubenswrapper[4652]: I1205 05:45:54.043904 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6cmts" event={"ID":"3516f65d-a205-4768-b345-cee580a16b09","Type":"ContainerDied","Data":"8f18516a28495baf1bacb0f34032551481833faf5ab6afce86722d2ada2c23dd"} Dec 05 05:45:54 crc kubenswrapper[4652]: I1205 05:45:54.056303 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=1.056289779 podStartE2EDuration="1.056289779s" podCreationTimestamp="2025-12-05 05:45:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:54.053024704 +0000 UTC m=+1156.289754970" watchObservedRunningTime="2025-12-05 05:45:54.056289779 +0000 UTC m=+1156.293020046" Dec 05 05:45:54 crc kubenswrapper[4652]: I1205 05:45:54.141097 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" path="/var/lib/kubelet/pods/ca914a04-6a6f-4b20-af32-e0771a7dffa5/volumes" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.052799 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerStarted","Data":"373d6f73b6f40a3fa2966065604f864209bb95e32806141a4f866f057cb5eaf9"} Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.053130 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="ceilometer-central-agent" containerID="cri-o://65bcbe4b093392356997e024d7c837faccfc88c829ef905a59a8865820a2588b" gracePeriod=30 Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.053145 4652 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/ceilometer-0" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="sg-core" containerID="cri-o://458fdd5791d5738328580b67ef5993b1b7451eec2ac452b0d6c30375d9840177" gracePeriod=30 Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.053165 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="ceilometer-notification-agent" containerID="cri-o://a4477c831f463d3d879e9f718ce33ab5a312877b9173b72af7838aee926dd637" gracePeriod=30 Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.053145 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="proxy-httpd" containerID="cri-o://373d6f73b6f40a3fa2966065604f864209bb95e32806141a4f866f057cb5eaf9" gracePeriod=30 Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.075250 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.696632861 podStartE2EDuration="10.075228847s" podCreationTimestamp="2025-12-05 05:45:45 +0000 UTC" firstStartedPulling="2025-12-05 05:45:46.751695735 +0000 UTC m=+1148.988426002" lastFinishedPulling="2025-12-05 05:45:54.130291721 +0000 UTC m=+1156.367021988" observedRunningTime="2025-12-05 05:45:55.072486201 +0000 UTC m=+1157.309216469" watchObservedRunningTime="2025-12-05 05:45:55.075228847 +0000 UTC m=+1157.311959113" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.418804 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6cmts" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.551700 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27wcp\" (UniqueName: \"kubernetes.io/projected/3516f65d-a205-4768-b345-cee580a16b09-kube-api-access-27wcp\") pod \"3516f65d-a205-4768-b345-cee580a16b09\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.552053 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-scripts\") pod \"3516f65d-a205-4768-b345-cee580a16b09\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.552108 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-config-data\") pod \"3516f65d-a205-4768-b345-cee580a16b09\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.552171 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-combined-ca-bundle\") pod \"3516f65d-a205-4768-b345-cee580a16b09\" (UID: \"3516f65d-a205-4768-b345-cee580a16b09\") " Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.560982 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-scripts" (OuterVolumeSpecName: "scripts") pod "3516f65d-a205-4768-b345-cee580a16b09" (UID: "3516f65d-a205-4768-b345-cee580a16b09"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.564728 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3516f65d-a205-4768-b345-cee580a16b09-kube-api-access-27wcp" (OuterVolumeSpecName: "kube-api-access-27wcp") pod "3516f65d-a205-4768-b345-cee580a16b09" (UID: "3516f65d-a205-4768-b345-cee580a16b09"). InnerVolumeSpecName "kube-api-access-27wcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.589193 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3516f65d-a205-4768-b345-cee580a16b09" (UID: "3516f65d-a205-4768-b345-cee580a16b09"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.596533 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-config-data" (OuterVolumeSpecName: "config-data") pod "3516f65d-a205-4768-b345-cee580a16b09" (UID: "3516f65d-a205-4768-b345-cee580a16b09"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.654177 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.654207 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.654220 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3516f65d-a205-4768-b345-cee580a16b09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:55 crc kubenswrapper[4652]: I1205 05:45:55.654232 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27wcp\" (UniqueName: \"kubernetes.io/projected/3516f65d-a205-4768-b345-cee580a16b09-kube-api-access-27wcp\") on node \"crc\" DevicePath \"\"" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.061073 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6cmts" event={"ID":"3516f65d-a205-4768-b345-cee580a16b09","Type":"ContainerDied","Data":"1a133634d1ada4590cbb18d31b99abae52e4b9761d617bade728862abfef775b"} Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.061109 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a133634d1ada4590cbb18d31b99abae52e4b9761d617bade728862abfef775b" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.061158 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6cmts" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.071616 4652 generic.go:334] "Generic (PLEG): container finished" podID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerID="373d6f73b6f40a3fa2966065604f864209bb95e32806141a4f866f057cb5eaf9" exitCode=0 Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.071648 4652 generic.go:334] "Generic (PLEG): container finished" podID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerID="458fdd5791d5738328580b67ef5993b1b7451eec2ac452b0d6c30375d9840177" exitCode=2 Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.071658 4652 generic.go:334] "Generic (PLEG): container finished" podID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerID="a4477c831f463d3d879e9f718ce33ab5a312877b9173b72af7838aee926dd637" exitCode=0 Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.071668 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerDied","Data":"373d6f73b6f40a3fa2966065604f864209bb95e32806141a4f866f057cb5eaf9"} Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.071691 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerDied","Data":"458fdd5791d5738328580b67ef5993b1b7451eec2ac452b0d6c30375d9840177"} Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.071701 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerDied","Data":"a4477c831f463d3d879e9f718ce33ab5a312877b9173b72af7838aee926dd637"} Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.152278 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 05:45:56 crc kubenswrapper[4652]: E1205 05:45:56.152660 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3516f65d-a205-4768-b345-cee580a16b09" containerName="nova-cell0-conductor-db-sync" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.152680 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3516f65d-a205-4768-b345-cee580a16b09" containerName="nova-cell0-conductor-db-sync" Dec 05 05:45:56 crc kubenswrapper[4652]: E1205 05:45:56.152720 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.152726 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.152894 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca914a04-6a6f-4b20-af32-e0771a7dffa5" containerName="watcher-decision-engine" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.152914 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="3516f65d-a205-4768-b345-cee580a16b09" containerName="nova-cell0-conductor-db-sync" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.153531 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.155817 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5wsr5" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.156004 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.164194 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.263680 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.263722 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85lgx\" (UniqueName: \"kubernetes.io/projected/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-kube-api-access-85lgx\") pod \"nova-cell0-conductor-0\" (UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.263761 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.365419 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85lgx\" (UniqueName: \"kubernetes.io/projected/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-kube-api-access-85lgx\") pod \"nova-cell0-conductor-0\" (UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.365741 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.365951 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.369632 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.369754 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.384148 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85lgx\" (UniqueName: \"kubernetes.io/projected/4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb-kube-api-access-85lgx\") pod \"nova-cell0-conductor-0\" (UID: \"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb\") " pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.467951 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:56 crc kubenswrapper[4652]: I1205 05:45:56.860050 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 05:45:56 crc kubenswrapper[4652]: W1205 05:45:56.870026 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cbffe7e_c5f0_4e24_b01d_e79d5df6dceb.slice/crio-27a8aac1df166083b4556f92b9925409010f4485a849245a7cb0f5cfd714e4c2 WatchSource:0}: Error finding container 27a8aac1df166083b4556f92b9925409010f4485a849245a7cb0f5cfd714e4c2: Status 404 returned error can't find the container with id 27a8aac1df166083b4556f92b9925409010f4485a849245a7cb0f5cfd714e4c2 Dec 05 05:45:57 crc kubenswrapper[4652]: I1205 05:45:57.087521 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb","Type":"ContainerStarted","Data":"cbf722d1452641dd739477a7da1bcd22283a0484176ef484ee18802c6598c74a"} Dec 05 05:45:57 crc kubenswrapper[4652]: I1205 05:45:57.088633 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 05:45:57 crc kubenswrapper[4652]: I1205 05:45:57.088730 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb","Type":"ContainerStarted","Data":"27a8aac1df166083b4556f92b9925409010f4485a849245a7cb0f5cfd714e4c2"} Dec 05 05:45:57 crc kubenswrapper[4652]: I1205 05:45:57.102240 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.10222576 podStartE2EDuration="1.10222576s" podCreationTimestamp="2025-12-05 05:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:45:57.101610434 +0000 UTC m=+1159.338340702" watchObservedRunningTime="2025-12-05 05:45:57.10222576 +0000 UTC m=+1159.338956027" Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.103668 4652 generic.go:334] "Generic (PLEG): container finished" podID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerID="65bcbe4b093392356997e024d7c837faccfc88c829ef905a59a8865820a2588b" exitCode=0 Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.103736 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerDied","Data":"65bcbe4b093392356997e024d7c837faccfc88c829ef905a59a8865820a2588b"} Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.103913 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a3dc9f7a-1d96-4943-995e-9765861a32c0","Type":"ContainerDied","Data":"45f3238abc0261dcee226cfa42cc7ba6c566d452da0cfbfc2ef19d8dda577630"} Dec 05 
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.109863 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.223809 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-sg-core-conf-yaml\") pod \"a3dc9f7a-1d96-4943-995e-9765861a32c0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") "
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.223912 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-scripts\") pod \"a3dc9f7a-1d96-4943-995e-9765861a32c0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") "
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.223983 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-combined-ca-bundle\") pod \"a3dc9f7a-1d96-4943-995e-9765861a32c0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") "
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.224039 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sx98h\" (UniqueName: \"kubernetes.io/projected/a3dc9f7a-1d96-4943-995e-9765861a32c0-kube-api-access-sx98h\") pod \"a3dc9f7a-1d96-4943-995e-9765861a32c0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") "
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.224079 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-config-data\") pod \"a3dc9f7a-1d96-4943-995e-9765861a32c0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") "
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.224140 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-log-httpd\") pod \"a3dc9f7a-1d96-4943-995e-9765861a32c0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") "
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.224216 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-run-httpd\") pod \"a3dc9f7a-1d96-4943-995e-9765861a32c0\" (UID: \"a3dc9f7a-1d96-4943-995e-9765861a32c0\") "
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.224599 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a3dc9f7a-1d96-4943-995e-9765861a32c0" (UID: "a3dc9f7a-1d96-4943-995e-9765861a32c0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.224635 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a3dc9f7a-1d96-4943-995e-9765861a32c0" (UID: "a3dc9f7a-1d96-4943-995e-9765861a32c0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.225389 4652 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.225413 4652 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3dc9f7a-1d96-4943-995e-9765861a32c0-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.230822 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3dc9f7a-1d96-4943-995e-9765861a32c0-kube-api-access-sx98h" (OuterVolumeSpecName: "kube-api-access-sx98h") pod "a3dc9f7a-1d96-4943-995e-9765861a32c0" (UID: "a3dc9f7a-1d96-4943-995e-9765861a32c0"). InnerVolumeSpecName "kube-api-access-sx98h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.230828 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-scripts" (OuterVolumeSpecName: "scripts") pod "a3dc9f7a-1d96-4943-995e-9765861a32c0" (UID: "a3dc9f7a-1d96-4943-995e-9765861a32c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.246342 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a3dc9f7a-1d96-4943-995e-9765861a32c0" (UID: "a3dc9f7a-1d96-4943-995e-9765861a32c0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.282831 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3dc9f7a-1d96-4943-995e-9765861a32c0" (UID: "a3dc9f7a-1d96-4943-995e-9765861a32c0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.299928 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-config-data" (OuterVolumeSpecName: "config-data") pod "a3dc9f7a-1d96-4943-995e-9765861a32c0" (UID: "a3dc9f7a-1d96-4943-995e-9765861a32c0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.327322 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.327353 4652 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.327364 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.327372 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3dc9f7a-1d96-4943-995e-9765861a32c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:45:59 crc kubenswrapper[4652]: I1205 05:45:59.327381 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sx98h\" (UniqueName: \"kubernetes.io/projected/a3dc9f7a-1d96-4943-995e-9765861a32c0-kube-api-access-sx98h\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.111322 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.143578 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.152678 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.162325 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:46:00 crc kubenswrapper[4652]: E1205 05:46:00.162829 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="ceilometer-central-agent"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.162848 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="ceilometer-central-agent"
Dec 05 05:46:00 crc kubenswrapper[4652]: E1205 05:46:00.162864 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="sg-core"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.162870 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="sg-core"
Dec 05 05:46:00 crc kubenswrapper[4652]: E1205 05:46:00.162884 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="ceilometer-notification-agent"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.162890 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="ceilometer-notification-agent"
Dec 05 05:46:00 crc kubenswrapper[4652]: E1205 05:46:00.162898 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="proxy-httpd"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.162903 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="proxy-httpd"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.163077 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="sg-core"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.163086 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="ceilometer-central-agent"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.163098 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="proxy-httpd"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.163111 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" containerName="ceilometer-notification-agent"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.164760 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.168383 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.168669 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.170396 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.340901 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-log-httpd\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.340982 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-scripts\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.341042 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj2vq\" (UniqueName: \"kubernetes.io/projected/e50d66ad-cabe-4917-a690-145be1633551-kube-api-access-xj2vq\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.341074 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-run-httpd\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.341381 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-config-data\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0"
Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.341451 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0"
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.341494 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.443546 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-config-data\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.443609 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.443638 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.443685 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-log-httpd\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.443731 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-scripts\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.443774 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj2vq\" (UniqueName: \"kubernetes.io/projected/e50d66ad-cabe-4917-a690-145be1633551-kube-api-access-xj2vq\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.443795 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-run-httpd\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.444256 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-log-httpd\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.444350 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-run-httpd\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.447395 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.447795 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-config-data\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.447933 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.448043 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-scripts\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.460694 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj2vq\" (UniqueName: \"kubernetes.io/projected/e50d66ad-cabe-4917-a690-145be1633551-kube-api-access-xj2vq\") pod \"ceilometer-0\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " pod="openstack/ceilometer-0" Dec 05 05:46:00 crc kubenswrapper[4652]: I1205 05:46:00.484441 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:01 crc kubenswrapper[4652]: I1205 05:46:00.868076 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:01 crc kubenswrapper[4652]: W1205 05:46:00.868220 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode50d66ad_cabe_4917_a690_145be1633551.slice/crio-44a01b34264039a3490b2b79082488cde693f1bfdfc27c5b99270efcaad1f212 WatchSource:0}: Error finding container 44a01b34264039a3490b2b79082488cde693f1bfdfc27c5b99270efcaad1f212: Status 404 returned error can't find the container with id 44a01b34264039a3490b2b79082488cde693f1bfdfc27c5b99270efcaad1f212 Dec 05 05:46:01 crc kubenswrapper[4652]: I1205 05:46:01.119520 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerStarted","Data":"44a01b34264039a3490b2b79082488cde693f1bfdfc27c5b99270efcaad1f212"} Dec 05 05:46:02 crc kubenswrapper[4652]: I1205 05:46:02.136122 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3dc9f7a-1d96-4943-995e-9765861a32c0" path="/var/lib/kubelet/pods/a3dc9f7a-1d96-4943-995e-9765861a32c0/volumes" Dec 05 05:46:02 crc kubenswrapper[4652]: I1205 05:46:02.137352 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerStarted","Data":"7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7"} Dec 05 05:46:03 crc kubenswrapper[4652]: I1205 05:46:03.160381 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerStarted","Data":"d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2"} Dec 05 05:46:03 crc kubenswrapper[4652]: I1205 05:46:03.205783 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 05:46:03 crc kubenswrapper[4652]: I1205 05:46:03.404007 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Dec 05 05:46:03 crc kubenswrapper[4652]: I1205 05:46:03.425295 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Dec 05 05:46:04 crc kubenswrapper[4652]: I1205 05:46:04.170893 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerStarted","Data":"c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b"} Dec 05 05:46:04 crc kubenswrapper[4652]: I1205 05:46:04.171075 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Dec 05 05:46:04 crc kubenswrapper[4652]: I1205 05:46:04.191617 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Dec 05 05:46:05 crc kubenswrapper[4652]: I1205 05:46:05.179599 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerStarted","Data":"8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236"} Dec 05 05:46:05 crc kubenswrapper[4652]: I1205 05:46:05.194902 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" 
podStartSLOduration=1.540452025 podStartE2EDuration="5.194886575s" podCreationTimestamp="2025-12-05 05:46:00 +0000 UTC" firstStartedPulling="2025-12-05 05:46:00.869703281 +0000 UTC m=+1163.106433548" lastFinishedPulling="2025-12-05 05:46:04.524137831 +0000 UTC m=+1166.760868098" observedRunningTime="2025-12-05 05:46:05.194333967 +0000 UTC m=+1167.431064234" watchObservedRunningTime="2025-12-05 05:46:05.194886575 +0000 UTC m=+1167.431616832" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.191287 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.489650 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.894249 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-8dh2n"] Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.895536 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.897637 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.897647 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.903683 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-8dh2n"] Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.966789 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-config-data\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.966838 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-scripts\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.966899 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:06 crc kubenswrapper[4652]: I1205 05:46:06.967027 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7ftd\" (UniqueName: \"kubernetes.io/projected/f49d0a16-f608-4d69-af94-2e84fc4dee10-kube-api-access-s7ftd\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.007773 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.009077 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.013399 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.035138 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.069077 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-config-data\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.069116 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-scripts\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.069170 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.069241 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.069257 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grst5\" (UniqueName: \"kubernetes.io/projected/d07e49ea-3586-42bb-9af9-0f68d9823fec-kube-api-access-grst5\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.069275 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-config-data\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.069318 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7ftd\" (UniqueName: \"kubernetes.io/projected/f49d0a16-f608-4d69-af94-2e84fc4dee10-kube-api-access-s7ftd\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.075057 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-config-data\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.076386 4652 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-scripts\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.076888 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.080296 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.083803 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.086572 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.092235 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7ftd\" (UniqueName: \"kubernetes.io/projected/f49d0a16-f608-4d69-af94-2e84fc4dee10-kube-api-access-s7ftd\") pod \"nova-cell0-cell-mapping-8dh2n\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") " pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.097331 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.150166 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.151426 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.161427 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.172803 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-config-data\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.172899 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-config-data\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.172921 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmrtp\" (UniqueName: \"kubernetes.io/projected/81712ea8-c61d-4966-b51f-643b52063167-kube-api-access-cmrtp\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.173230 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81712ea8-c61d-4966-b51f-643b52063167-logs\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.173258 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.173312 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grst5\" (UniqueName: \"kubernetes.io/projected/d07e49ea-3586-42bb-9af9-0f68d9823fec-kube-api-access-grst5\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.173327 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.179869 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.185915 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.186904 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-config-data\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.226178 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8dh2n" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.227174 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grst5\" (UniqueName: \"kubernetes.io/projected/d07e49ea-3586-42bb-9af9-0f68d9823fec-kube-api-access-grst5\") pod \"nova-scheduler-0\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.269607 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.271235 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.280180 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.281426 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.281496 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kr7g\" (UniqueName: \"kubernetes.io/projected/579de9cf-880a-49ee-a42d-6569627d4caa-kube-api-access-6kr7g\") pod \"nova-cell1-novncproxy-0\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.281677 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81712ea8-c61d-4966-b51f-643b52063167-logs\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.281697 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.281716 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.281812 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-config-data\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.281855 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmrtp\" (UniqueName: \"kubernetes.io/projected/81712ea8-c61d-4966-b51f-643b52063167-kube-api-access-cmrtp\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.282498 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81712ea8-c61d-4966-b51f-643b52063167-logs\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.289063 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.289153 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-config-data\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.289598 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.305264 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmrtp\" (UniqueName: \"kubernetes.io/projected/81712ea8-c61d-4966-b51f-643b52063167-kube-api-access-cmrtp\") pod \"nova-api-0\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") " pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.337999 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.355626 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-r5bbx"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.357668 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.377592 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-r5bbx"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.383847 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.383896 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkkmv\" (UniqueName: \"kubernetes.io/projected/52d17769-58d5-42fc-b559-291688d65e31-kube-api-access-wkkmv\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.383922 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kr7g\" (UniqueName: \"kubernetes.io/projected/579de9cf-880a-49ee-a42d-6569627d4caa-kube-api-access-6kr7g\") pod \"nova-cell1-novncproxy-0\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.383999 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-config-data\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.384017 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52d17769-58d5-42fc-b559-291688d65e31-logs\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.384103 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.384219 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.390029 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.393583 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.398095 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kr7g\" (UniqueName: \"kubernetes.io/projected/579de9cf-880a-49ee-a42d-6569627d4caa-kube-api-access-6kr7g\") pod \"nova-cell1-novncproxy-0\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.486089 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-svc\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.489582 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkkmv\" (UniqueName: \"kubernetes.io/projected/52d17769-58d5-42fc-b559-291688d65e31-kube-api-access-wkkmv\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.489727 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-config-data\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.489752 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52d17769-58d5-42fc-b559-291688d65e31-logs\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.489788 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-config\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.489880 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-nb\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.489920 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-swift-storage-0\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.489985 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-sb\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " 
pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.490035 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spfh7\" (UniqueName: \"kubernetes.io/projected/d5e284f4-329e-47be-862a-96dd0d66c5fd-kube-api-access-spfh7\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.490076 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.490297 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52d17769-58d5-42fc-b559-291688d65e31-logs\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.493885 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-config-data\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.497377 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.504902 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkkmv\" (UniqueName: \"kubernetes.io/projected/52d17769-58d5-42fc-b559-291688d65e31-kube-api-access-wkkmv\") pod \"nova-metadata-0\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.590364 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.591995 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-config\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.592057 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-nb\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.592086 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-swift-storage-0\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.592123 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-sb\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.592158 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spfh7\" (UniqueName: \"kubernetes.io/projected/d5e284f4-329e-47be-862a-96dd0d66c5fd-kube-api-access-spfh7\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.592214 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-svc\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.593485 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-svc\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.593845 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-nb\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.593946 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-sb\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: 
I1205 05:46:07.594663 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-swift-storage-0\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.595138 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-config\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.605304 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spfh7\" (UniqueName: \"kubernetes.io/projected/d5e284f4-329e-47be-862a-96dd0d66c5fd-kube-api-access-spfh7\") pod \"dnsmasq-dns-844fc57f6f-r5bbx\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.621484 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.634910 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.700119 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.857141 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-8dh2n"] Dec 05 05:46:07 crc kubenswrapper[4652]: I1205 05:46:07.920197 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:46:08 crc kubenswrapper[4652]: W1205 05:46:08.142528 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81712ea8_c61d_4966_b51f_643b52063167.slice/crio-1575284420f91a5d731b4fab804eeba4b0d2bc45a471abd8b59844ad09efcb5a WatchSource:0}: Error finding container 1575284420f91a5d731b4fab804eeba4b0d2bc45a471abd8b59844ad09efcb5a: Status 404 returned error can't find the container with id 1575284420f91a5d731b4fab804eeba4b0d2bc45a471abd8b59844ad09efcb5a Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.163693 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.231811 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d07e49ea-3586-42bb-9af9-0f68d9823fec","Type":"ContainerStarted","Data":"d77b17d75b8ce833cc3dd1612ebb926379e81ebeb6f0068f5a8e5d602c163d03"} Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.233854 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8dh2n" event={"ID":"f49d0a16-f608-4d69-af94-2e84fc4dee10","Type":"ContainerStarted","Data":"de71d8fe70be007e4bd9eb4152ff6dd4bcc12901d836cdd54c25356484e00f05"} Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.233881 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8dh2n" 
event={"ID":"f49d0a16-f608-4d69-af94-2e84fc4dee10","Type":"ContainerStarted","Data":"cdeb9889b774fade70399066ac6fe918b3a6299c7e04b407efd7e9043d43a915"} Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.243529 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81712ea8-c61d-4966-b51f-643b52063167","Type":"ContainerStarted","Data":"1575284420f91a5d731b4fab804eeba4b0d2bc45a471abd8b59844ad09efcb5a"} Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.248711 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.259151 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.264144 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-8dh2n" podStartSLOduration=2.26412622 podStartE2EDuration="2.26412622s" podCreationTimestamp="2025-12-05 05:46:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:08.254683486 +0000 UTC m=+1170.491413753" watchObservedRunningTime="2025-12-05 05:46:08.26412622 +0000 UTC m=+1170.500856488" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.380271 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-r5bbx"] Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.525696 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8rdbd"] Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.527051 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.529394 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.533103 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.534674 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.534785 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-config-data\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.534821 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-scripts\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.534856 4652 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txplq\" (UniqueName: \"kubernetes.io/projected/d81472ce-02d8-406f-952a-a6196c2770f4-kube-api-access-txplq\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.540299 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8rdbd"] Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.636658 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.636714 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-config-data\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.636740 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-scripts\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.636761 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txplq\" (UniqueName: \"kubernetes.io/projected/d81472ce-02d8-406f-952a-a6196c2770f4-kube-api-access-txplq\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.640989 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-scripts\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.642202 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-config-data\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.645525 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.653632 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txplq\" (UniqueName: \"kubernetes.io/projected/d81472ce-02d8-406f-952a-a6196c2770f4-kube-api-access-txplq\") pod 
\"nova-cell1-conductor-db-sync-8rdbd\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") " pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:08 crc kubenswrapper[4652]: I1205 05:46:08.841963 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-8rdbd" Dec 05 05:46:09 crc kubenswrapper[4652]: I1205 05:46:09.257828 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52d17769-58d5-42fc-b559-291688d65e31","Type":"ContainerStarted","Data":"9da09eac828e22eb8dd0a756bdc43051bf8601999eae0dc1b31ff30313420639"} Dec 05 05:46:09 crc kubenswrapper[4652]: I1205 05:46:09.260296 4652 generic.go:334] "Generic (PLEG): container finished" podID="d5e284f4-329e-47be-862a-96dd0d66c5fd" containerID="9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f" exitCode=0 Dec 05 05:46:09 crc kubenswrapper[4652]: I1205 05:46:09.260341 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" event={"ID":"d5e284f4-329e-47be-862a-96dd0d66c5fd","Type":"ContainerDied","Data":"9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f"} Dec 05 05:46:09 crc kubenswrapper[4652]: I1205 05:46:09.260360 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" event={"ID":"d5e284f4-329e-47be-862a-96dd0d66c5fd","Type":"ContainerStarted","Data":"a2a1350d4eddd8ec53528335912191c2d1682d5266f631512fbc75b6639fadc6"} Dec 05 05:46:09 crc kubenswrapper[4652]: I1205 05:46:09.262831 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"579de9cf-880a-49ee-a42d-6569627d4caa","Type":"ContainerStarted","Data":"495c8064b3554c76d22e94f3856c4d6df09329c6a49eb4c42dc473dc1829d3f3"} Dec 05 05:46:09 crc kubenswrapper[4652]: I1205 05:46:09.339019 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8rdbd"] Dec 05 05:46:10 crc kubenswrapper[4652]: I1205 05:46:10.315790 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-8rdbd" event={"ID":"d81472ce-02d8-406f-952a-a6196c2770f4","Type":"ContainerStarted","Data":"3fe241397942c9e6cb2ef45d388a0401feafaafa09e1f750fe9c4108e5358d32"} Dec 05 05:46:10 crc kubenswrapper[4652]: I1205 05:46:10.316269 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-8rdbd" event={"ID":"d81472ce-02d8-406f-952a-a6196c2770f4","Type":"ContainerStarted","Data":"1b2e754d748e0da41f636096002d095867f1c2e526aeb6a63ba2ab5edf15252f"} Dec 05 05:46:10 crc kubenswrapper[4652]: I1205 05:46:10.371360 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-8rdbd" podStartSLOduration=2.371342451 podStartE2EDuration="2.371342451s" podCreationTimestamp="2025-12-05 05:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:10.338760771 +0000 UTC m=+1172.575491038" watchObservedRunningTime="2025-12-05 05:46:10.371342451 +0000 UTC m=+1172.608072718" Dec 05 05:46:10 crc kubenswrapper[4652]: I1205 05:46:10.375476 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" event={"ID":"d5e284f4-329e-47be-862a-96dd0d66c5fd","Type":"ContainerStarted","Data":"cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62"} Dec 05 05:46:10 crc 
kubenswrapper[4652]: I1205 05:46:10.385408 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:10 crc kubenswrapper[4652]: I1205 05:46:10.417415 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" podStartSLOduration=3.4174005579999998 podStartE2EDuration="3.417400558s" podCreationTimestamp="2025-12-05 05:46:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:10.413674285 +0000 UTC m=+1172.650404553" watchObservedRunningTime="2025-12-05 05:46:10.417400558 +0000 UTC m=+1172.654130826" Dec 05 05:46:10 crc kubenswrapper[4652]: I1205 05:46:10.933480 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:46:10 crc kubenswrapper[4652]: I1205 05:46:10.941449 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.396789 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81712ea8-c61d-4966-b51f-643b52063167","Type":"ContainerStarted","Data":"f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566"} Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.397366 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81712ea8-c61d-4966-b51f-643b52063167","Type":"ContainerStarted","Data":"49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f"} Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.400408 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52d17769-58d5-42fc-b559-291688d65e31","Type":"ContainerStarted","Data":"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f"} Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.400445 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52d17769-58d5-42fc-b559-291688d65e31","Type":"ContainerStarted","Data":"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272"} Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.400551 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="52d17769-58d5-42fc-b559-291688d65e31" containerName="nova-metadata-log" containerID="cri-o://7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272" gracePeriod=30 Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.400783 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="52d17769-58d5-42fc-b559-291688d65e31" containerName="nova-metadata-metadata" containerID="cri-o://74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f" gracePeriod=30 Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.403766 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d07e49ea-3586-42bb-9af9-0f68d9823fec","Type":"ContainerStarted","Data":"c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c"} Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.415909 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"579de9cf-880a-49ee-a42d-6569627d4caa","Type":"ContainerStarted","Data":"1ae366a75281b020232cf9c323b30d4ae8bf6d5ef8ee1f4b804863da2f55d2a9"} Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.416105 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="579de9cf-880a-49ee-a42d-6569627d4caa" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1ae366a75281b020232cf9c323b30d4ae8bf6d5ef8ee1f4b804863da2f55d2a9" gracePeriod=30 Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.420857 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.3079728360000002 podStartE2EDuration="5.420843889s" podCreationTimestamp="2025-12-05 05:46:07 +0000 UTC" firstStartedPulling="2025-12-05 05:46:08.146290464 +0000 UTC m=+1170.383020730" lastFinishedPulling="2025-12-05 05:46:11.259161516 +0000 UTC m=+1173.495891783" observedRunningTime="2025-12-05 05:46:12.416267739 +0000 UTC m=+1174.652998006" watchObservedRunningTime="2025-12-05 05:46:12.420843889 +0000 UTC m=+1174.657574155" Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.434455 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.123484899 podStartE2EDuration="6.43444031s" podCreationTimestamp="2025-12-05 05:46:06 +0000 UTC" firstStartedPulling="2025-12-05 05:46:07.939726408 +0000 UTC m=+1170.176456675" lastFinishedPulling="2025-12-05 05:46:11.250681819 +0000 UTC m=+1173.487412086" observedRunningTime="2025-12-05 05:46:12.428433402 +0000 UTC m=+1174.665163669" watchObservedRunningTime="2025-12-05 05:46:12.43444031 +0000 UTC m=+1174.671170577" Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.443938 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.461331002 podStartE2EDuration="5.443930315s" podCreationTimestamp="2025-12-05 05:46:07 +0000 UTC" firstStartedPulling="2025-12-05 05:46:08.269446419 +0000 UTC m=+1170.506176687" lastFinishedPulling="2025-12-05 05:46:11.252045733 +0000 UTC m=+1173.488776000" observedRunningTime="2025-12-05 05:46:12.442601388 +0000 UTC m=+1174.679331655" watchObservedRunningTime="2025-12-05 05:46:12.443930315 +0000 UTC m=+1174.680660582" Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.463531 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.479196876 podStartE2EDuration="5.463519438s" podCreationTimestamp="2025-12-05 05:46:07 +0000 UTC" firstStartedPulling="2025-12-05 05:46:08.279774068 +0000 UTC m=+1170.516504335" lastFinishedPulling="2025-12-05 05:46:11.264096629 +0000 UTC m=+1173.500826897" observedRunningTime="2025-12-05 05:46:12.458631883 +0000 UTC m=+1174.695362150" watchObservedRunningTime="2025-12-05 05:46:12.463519438 +0000 UTC m=+1174.700249705" Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.628819 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.635041 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.635091 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 05:46:12 crc kubenswrapper[4652]: I1205 05:46:12.963180 
4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.049508 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-combined-ca-bundle\") pod \"52d17769-58d5-42fc-b559-291688d65e31\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.049596 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52d17769-58d5-42fc-b559-291688d65e31-logs\") pod \"52d17769-58d5-42fc-b559-291688d65e31\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.049682 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-config-data\") pod \"52d17769-58d5-42fc-b559-291688d65e31\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.049714 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkkmv\" (UniqueName: \"kubernetes.io/projected/52d17769-58d5-42fc-b559-291688d65e31-kube-api-access-wkkmv\") pod \"52d17769-58d5-42fc-b559-291688d65e31\" (UID: \"52d17769-58d5-42fc-b559-291688d65e31\") " Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.051146 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52d17769-58d5-42fc-b559-291688d65e31-logs" (OuterVolumeSpecName: "logs") pod "52d17769-58d5-42fc-b559-291688d65e31" (UID: "52d17769-58d5-42fc-b559-291688d65e31"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.054741 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d17769-58d5-42fc-b559-291688d65e31-kube-api-access-wkkmv" (OuterVolumeSpecName: "kube-api-access-wkkmv") pod "52d17769-58d5-42fc-b559-291688d65e31" (UID: "52d17769-58d5-42fc-b559-291688d65e31"). InnerVolumeSpecName "kube-api-access-wkkmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.072791 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-config-data" (OuterVolumeSpecName: "config-data") pod "52d17769-58d5-42fc-b559-291688d65e31" (UID: "52d17769-58d5-42fc-b559-291688d65e31"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.073933 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52d17769-58d5-42fc-b559-291688d65e31" (UID: "52d17769-58d5-42fc-b559-291688d65e31"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.153671 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.153723 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkkmv\" (UniqueName: \"kubernetes.io/projected/52d17769-58d5-42fc-b559-291688d65e31-kube-api-access-wkkmv\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.153741 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52d17769-58d5-42fc-b559-291688d65e31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.153809 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52d17769-58d5-42fc-b559-291688d65e31-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.427693 4652 generic.go:334] "Generic (PLEG): container finished" podID="52d17769-58d5-42fc-b559-291688d65e31" containerID="74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f" exitCode=0 Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.427722 4652 generic.go:334] "Generic (PLEG): container finished" podID="52d17769-58d5-42fc-b559-291688d65e31" containerID="7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272" exitCode=143 Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.428502 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.435722 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52d17769-58d5-42fc-b559-291688d65e31","Type":"ContainerDied","Data":"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f"} Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.435762 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52d17769-58d5-42fc-b559-291688d65e31","Type":"ContainerDied","Data":"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272"} Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.435775 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52d17769-58d5-42fc-b559-291688d65e31","Type":"ContainerDied","Data":"9da09eac828e22eb8dd0a756bdc43051bf8601999eae0dc1b31ff30313420639"} Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.435789 4652 scope.go:117] "RemoveContainer" containerID="74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.462831 4652 scope.go:117] "RemoveContainer" containerID="7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.473823 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.499899 4652 scope.go:117] "RemoveContainer" containerID="74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f" Dec 05 05:46:13 crc kubenswrapper[4652]: E1205 05:46:13.501576 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f\": container with ID starting with 74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f not found: ID does not exist" containerID="74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.501613 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f"} err="failed to get container status \"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f\": rpc error: code = NotFound desc = could not find container \"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f\": container with ID starting with 74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f not found: ID does not exist" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.501634 4652 scope.go:117] "RemoveContainer" containerID="7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272" Dec 05 05:46:13 crc kubenswrapper[4652]: E1205 05:46:13.502107 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272\": container with ID starting with 7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272 not found: ID does not exist" containerID="7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.502131 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272"} err="failed to get container status \"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272\": rpc error: code = NotFound desc = could not find container \"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272\": container with ID starting with 7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272 not found: ID does not exist" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.502146 4652 scope.go:117] "RemoveContainer" containerID="74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.502343 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f"} err="failed to get container status \"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f\": rpc error: code = NotFound desc = could not find container \"74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f\": container with ID starting with 74aff4e6b33dadffa10a13ab584bb1cf574a843220e05d26e9086a3e8ff32a8f not found: ID does not exist" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.502359 4652 scope.go:117] "RemoveContainer" containerID="7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.502594 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272"} err="failed to get container status \"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272\": rpc error: code = NotFound desc = could not find container \"7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272\": 
container with ID starting with 7947ccccf008e0b6763ffe0acc930c03b3d37b1d0db91a5827d3a3def235b272 not found: ID does not exist" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.504858 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.522015 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:46:13 crc kubenswrapper[4652]: E1205 05:46:13.522485 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d17769-58d5-42fc-b559-291688d65e31" containerName="nova-metadata-metadata" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.522505 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d17769-58d5-42fc-b559-291688d65e31" containerName="nova-metadata-metadata" Dec 05 05:46:13 crc kubenswrapper[4652]: E1205 05:46:13.522531 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d17769-58d5-42fc-b559-291688d65e31" containerName="nova-metadata-log" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.522539 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d17769-58d5-42fc-b559-291688d65e31" containerName="nova-metadata-log" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.522767 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="52d17769-58d5-42fc-b559-291688d65e31" containerName="nova-metadata-log" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.522788 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="52d17769-58d5-42fc-b559-291688d65e31" containerName="nova-metadata-metadata" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.523823 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.528028 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.528353 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.534504 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.560750 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.560821 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sn7x\" (UniqueName: \"kubernetes.io/projected/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-kube-api-access-2sn7x\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.560959 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0" Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 
05:46:13.561150 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-config-data\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.561197 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-logs\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.662846 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sn7x\" (UniqueName: \"kubernetes.io/projected/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-kube-api-access-2sn7x\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.662931 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.663021 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-config-data\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.663057 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-logs\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.663110 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.665819 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-logs\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.669678 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.670029 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-config-data\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.677047 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.677143 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sn7x\" (UniqueName: \"kubernetes.io/projected/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-kube-api-access-2sn7x\") pod \"nova-metadata-0\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:13 crc kubenswrapper[4652]: I1205 05:46:13.847657 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 05:46:14 crc kubenswrapper[4652]: I1205 05:46:14.134534 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52d17769-58d5-42fc-b559-291688d65e31" path="/var/lib/kubelet/pods/52d17769-58d5-42fc-b559-291688d65e31/volumes"
Dec 05 05:46:14 crc kubenswrapper[4652]: I1205 05:46:14.271935 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 05:46:14 crc kubenswrapper[4652]: I1205 05:46:14.456831 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22904cb7-2e5f-4cc9-b509-3f16170f8e3f","Type":"ContainerStarted","Data":"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4"}
Dec 05 05:46:14 crc kubenswrapper[4652]: I1205 05:46:14.457099 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22904cb7-2e5f-4cc9-b509-3f16170f8e3f","Type":"ContainerStarted","Data":"368ffebbe5a8f081a34d1f57be67b0137632fef22307041e8c1d809349cbbf12"}
Dec 05 05:46:14 crc kubenswrapper[4652]: I1205 05:46:14.466079 4652 generic.go:334] "Generic (PLEG): container finished" podID="d81472ce-02d8-406f-952a-a6196c2770f4" containerID="3fe241397942c9e6cb2ef45d388a0401feafaafa09e1f750fe9c4108e5358d32" exitCode=0
Dec 05 05:46:14 crc kubenswrapper[4652]: I1205 05:46:14.466168 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-8rdbd" event={"ID":"d81472ce-02d8-406f-952a-a6196c2770f4","Type":"ContainerDied","Data":"3fe241397942c9e6cb2ef45d388a0401feafaafa09e1f750fe9c4108e5358d32"}
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.481960 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22904cb7-2e5f-4cc9-b509-3f16170f8e3f","Type":"ContainerStarted","Data":"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367"}
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.484278 4652 generic.go:334] "Generic (PLEG): container finished" podID="f49d0a16-f608-4d69-af94-2e84fc4dee10" containerID="de71d8fe70be007e4bd9eb4152ff6dd4bcc12901d836cdd54c25356484e00f05" exitCode=0
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.484514 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8dh2n" event={"ID":"f49d0a16-f608-4d69-af94-2e84fc4dee10","Type":"ContainerDied","Data":"de71d8fe70be007e4bd9eb4152ff6dd4bcc12901d836cdd54c25356484e00f05"}
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.507465 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.507434471 podStartE2EDuration="2.507434471s" podCreationTimestamp="2025-12-05 05:46:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:15.498099168 +0000 UTC m=+1177.734829435" watchObservedRunningTime="2025-12-05 05:46:15.507434471 +0000 UTC m=+1177.744164739"
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.794112 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-8rdbd"
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.913765 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-scripts\") pod \"d81472ce-02d8-406f-952a-a6196c2770f4\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") "
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.914235 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txplq\" (UniqueName: \"kubernetes.io/projected/d81472ce-02d8-406f-952a-a6196c2770f4-kube-api-access-txplq\") pod \"d81472ce-02d8-406f-952a-a6196c2770f4\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") "
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.914329 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-combined-ca-bundle\") pod \"d81472ce-02d8-406f-952a-a6196c2770f4\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") "
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.914472 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-config-data\") pod \"d81472ce-02d8-406f-952a-a6196c2770f4\" (UID: \"d81472ce-02d8-406f-952a-a6196c2770f4\") "
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.920225 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-scripts" (OuterVolumeSpecName: "scripts") pod "d81472ce-02d8-406f-952a-a6196c2770f4" (UID: "d81472ce-02d8-406f-952a-a6196c2770f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.920494 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d81472ce-02d8-406f-952a-a6196c2770f4-kube-api-access-txplq" (OuterVolumeSpecName: "kube-api-access-txplq") pod "d81472ce-02d8-406f-952a-a6196c2770f4" (UID: "d81472ce-02d8-406f-952a-a6196c2770f4"). InnerVolumeSpecName "kube-api-access-txplq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.941199 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d81472ce-02d8-406f-952a-a6196c2770f4" (UID: "d81472ce-02d8-406f-952a-a6196c2770f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:15 crc kubenswrapper[4652]: I1205 05:46:15.941685 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-config-data" (OuterVolumeSpecName: "config-data") pod "d81472ce-02d8-406f-952a-a6196c2770f4" (UID: "d81472ce-02d8-406f-952a-a6196c2770f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.018398 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.018429 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txplq\" (UniqueName: \"kubernetes.io/projected/d81472ce-02d8-406f-952a-a6196c2770f4-kube-api-access-txplq\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.018442 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.018464 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81472ce-02d8-406f-952a-a6196c2770f4-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.498984 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-8rdbd"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.498938 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-8rdbd" event={"ID":"d81472ce-02d8-406f-952a-a6196c2770f4","Type":"ContainerDied","Data":"1b2e754d748e0da41f636096002d095867f1c2e526aeb6a63ba2ab5edf15252f"}
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.500305 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b2e754d748e0da41f636096002d095867f1c2e526aeb6a63ba2ab5edf15252f"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.557546 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 05:46:16 crc kubenswrapper[4652]: E1205 05:46:16.558277 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d81472ce-02d8-406f-952a-a6196c2770f4" containerName="nova-cell1-conductor-db-sync"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.558308 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81472ce-02d8-406f-952a-a6196c2770f4" containerName="nova-cell1-conductor-db-sync"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.558745 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d81472ce-02d8-406f-952a-a6196c2770f4" containerName="nova-cell1-conductor-db-sync"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.560111 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.562533 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.569850 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.736663 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3e21f7d-50b3-48f5-8623-a85830778ab7-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.737904 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5jms\" (UniqueName: \"kubernetes.io/projected/d3e21f7d-50b3-48f5-8623-a85830778ab7-kube-api-access-f5jms\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.737976 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3e21f7d-50b3-48f5-8623-a85830778ab7-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.839859 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5jms\" (UniqueName: \"kubernetes.io/projected/d3e21f7d-50b3-48f5-8623-a85830778ab7-kube-api-access-f5jms\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.839909 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3e21f7d-50b3-48f5-8623-a85830778ab7-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.840033 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3e21f7d-50b3-48f5-8623-a85830778ab7-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.845271 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3e21f7d-50b3-48f5-8623-a85830778ab7-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.845911 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3e21f7d-50b3-48f5-8623-a85830778ab7-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.848607 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8dh2n"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.855181 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5jms\" (UniqueName: \"kubernetes.io/projected/d3e21f7d-50b3-48f5-8623-a85830778ab7-kube-api-access-f5jms\") pod \"nova-cell1-conductor-0\" (UID: \"d3e21f7d-50b3-48f5-8623-a85830778ab7\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:16 crc kubenswrapper[4652]: I1205 05:46:16.893702 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.043879 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-config-data\") pod \"f49d0a16-f608-4d69-af94-2e84fc4dee10\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") "
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.044587 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-scripts\") pod \"f49d0a16-f608-4d69-af94-2e84fc4dee10\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") "
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.044735 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-combined-ca-bundle\") pod \"f49d0a16-f608-4d69-af94-2e84fc4dee10\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") "
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.044934 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7ftd\" (UniqueName: \"kubernetes.io/projected/f49d0a16-f608-4d69-af94-2e84fc4dee10-kube-api-access-s7ftd\") pod \"f49d0a16-f608-4d69-af94-2e84fc4dee10\" (UID: \"f49d0a16-f608-4d69-af94-2e84fc4dee10\") "
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.048586 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-scripts" (OuterVolumeSpecName: "scripts") pod "f49d0a16-f608-4d69-af94-2e84fc4dee10" (UID: "f49d0a16-f608-4d69-af94-2e84fc4dee10"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.048941 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f49d0a16-f608-4d69-af94-2e84fc4dee10-kube-api-access-s7ftd" (OuterVolumeSpecName: "kube-api-access-s7ftd") pod "f49d0a16-f608-4d69-af94-2e84fc4dee10" (UID: "f49d0a16-f608-4d69-af94-2e84fc4dee10"). InnerVolumeSpecName "kube-api-access-s7ftd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.068280 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-config-data" (OuterVolumeSpecName: "config-data") pod "f49d0a16-f608-4d69-af94-2e84fc4dee10" (UID: "f49d0a16-f608-4d69-af94-2e84fc4dee10"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.070751 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f49d0a16-f608-4d69-af94-2e84fc4dee10" (UID: "f49d0a16-f608-4d69-af94-2e84fc4dee10"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.149019 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.149052 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.149137 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7ftd\" (UniqueName: \"kubernetes.io/projected/f49d0a16-f608-4d69-af94-2e84fc4dee10-kube-api-access-s7ftd\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.149149 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f49d0a16-f608-4d69-af94-2e84fc4dee10-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.321055 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 05:46:17 crc kubenswrapper[4652]: W1205 05:46:17.322351 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3e21f7d_50b3_48f5_8623_a85830778ab7.slice/crio-492d64a06a85fa824d4283e60aef4b23c096d1b2f4078a3329f99fb876b990c5 WatchSource:0}: Error finding container 492d64a06a85fa824d4283e60aef4b23c096d1b2f4078a3329f99fb876b990c5: Status 404 returned error can't find the container with id 492d64a06a85fa824d4283e60aef4b23c096d1b2f4078a3329f99fb876b990c5
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.338895 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.338925 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.502329 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.507999 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d3e21f7d-50b3-48f5-8623-a85830778ab7","Type":"ContainerStarted","Data":"492d64a06a85fa824d4283e60aef4b23c096d1b2f4078a3329f99fb876b990c5"}
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.509918 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8dh2n" event={"ID":"f49d0a16-f608-4d69-af94-2e84fc4dee10","Type":"ContainerDied","Data":"cdeb9889b774fade70399066ac6fe918b3a6299c7e04b407efd7e9043d43a915"}
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.509948 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdeb9889b774fade70399066ac6fe918b3a6299c7e04b407efd7e9043d43a915"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.509942 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8dh2n"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.543325 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.591460 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.591507 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.684628 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.701719 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx"
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.721255 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.721502 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerName="nova-metadata-log" containerID="cri-o://66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4" gracePeriod=30
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.721573 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerName="nova-metadata-metadata" containerID="cri-o://c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367" gracePeriod=30
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.778442 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-m6xxk"]
Dec 05 05:46:17 crc kubenswrapper[4652]: I1205 05:46:17.778749 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" podUID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" containerName="dnsmasq-dns" containerID="cri-o://10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4" gracePeriod=10
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.058892 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.213581 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75958fc765-m6xxk"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.283431 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-config\") pod \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.306814 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.326596 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-nb\") pod \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.326850 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-sb\") pod \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.327371 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2lr7\" (UniqueName: \"kubernetes.io/projected/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-kube-api-access-b2lr7\") pod \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.327796 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-swift-storage-0\") pod \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.328093 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-svc\") pod \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\" (UID: \"9f6dcbe4-f415-43ec-a986-746cdcdeba2b\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.350673 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-config" (OuterVolumeSpecName: "config") pod "9f6dcbe4-f415-43ec-a986-746cdcdeba2b" (UID: "9f6dcbe4-f415-43ec-a986-746cdcdeba2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.351218 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-kube-api-access-b2lr7" (OuterVolumeSpecName: "kube-api-access-b2lr7") pod "9f6dcbe4-f415-43ec-a986-746cdcdeba2b" (UID: "9f6dcbe4-f415-43ec-a986-746cdcdeba2b"). InnerVolumeSpecName "kube-api-access-b2lr7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.388095 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9f6dcbe4-f415-43ec-a986-746cdcdeba2b" (UID: "9f6dcbe4-f415-43ec-a986-746cdcdeba2b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.399912 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9f6dcbe4-f415-43ec-a986-746cdcdeba2b" (UID: "9f6dcbe4-f415-43ec-a986-746cdcdeba2b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.424324 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f6dcbe4-f415-43ec-a986-746cdcdeba2b" (UID: "9f6dcbe4-f415-43ec-a986-746cdcdeba2b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.426818 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9f6dcbe4-f415-43ec-a986-746cdcdeba2b" (UID: "9f6dcbe4-f415-43ec-a986-746cdcdeba2b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.439426 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-combined-ca-bundle\") pod \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.439521 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-logs\") pod \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.439552 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-config-data\") pod \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.439692 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-nova-metadata-tls-certs\") pod \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.439757 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-logs" (OuterVolumeSpecName: "logs") pod "22904cb7-2e5f-4cc9-b509-3f16170f8e3f" (UID: "22904cb7-2e5f-4cc9-b509-3f16170f8e3f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.439781 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sn7x\" (UniqueName: \"kubernetes.io/projected/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-kube-api-access-2sn7x\") pod \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\" (UID: \"22904cb7-2e5f-4cc9-b509-3f16170f8e3f\") "
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.440200 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-logs\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.440218 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.440226 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.440236 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.440245 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2lr7\" (UniqueName: \"kubernetes.io/projected/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-kube-api-access-b2lr7\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.440253 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.440261 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f6dcbe4-f415-43ec-a986-746cdcdeba2b-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.444648 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-kube-api-access-2sn7x" (OuterVolumeSpecName: "kube-api-access-2sn7x") pod "22904cb7-2e5f-4cc9-b509-3f16170f8e3f" (UID: "22904cb7-2e5f-4cc9-b509-3f16170f8e3f"). InnerVolumeSpecName "kube-api-access-2sn7x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.466450 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-config-data" (OuterVolumeSpecName: "config-data") pod "22904cb7-2e5f-4cc9-b509-3f16170f8e3f" (UID: "22904cb7-2e5f-4cc9-b509-3f16170f8e3f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.468265 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22904cb7-2e5f-4cc9-b509-3f16170f8e3f" (UID: "22904cb7-2e5f-4cc9-b509-3f16170f8e3f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.489858 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "22904cb7-2e5f-4cc9-b509-3f16170f8e3f" (UID: "22904cb7-2e5f-4cc9-b509-3f16170f8e3f"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.524257 4652 generic.go:334] "Generic (PLEG): container finished" podID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerID="c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367" exitCode=0
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.525286 4652 generic.go:334] "Generic (PLEG): container finished" podID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerID="66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4" exitCode=143
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.524400 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.524406 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22904cb7-2e5f-4cc9-b509-3f16170f8e3f","Type":"ContainerDied","Data":"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367"}
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.525666 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22904cb7-2e5f-4cc9-b509-3f16170f8e3f","Type":"ContainerDied","Data":"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4"}
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.525698 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22904cb7-2e5f-4cc9-b509-3f16170f8e3f","Type":"ContainerDied","Data":"368ffebbe5a8f081a34d1f57be67b0137632fef22307041e8c1d809349cbbf12"}
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.525731 4652 scope.go:117] "RemoveContainer" containerID="c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.536118 4652 generic.go:334] "Generic (PLEG): container finished" podID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" containerID="10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4" exitCode=0
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.536164 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" event={"ID":"9f6dcbe4-f415-43ec-a986-746cdcdeba2b","Type":"ContainerDied","Data":"10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4"}
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.536215 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75958fc765-m6xxk" event={"ID":"9f6dcbe4-f415-43ec-a986-746cdcdeba2b","Type":"ContainerDied","Data":"054bec4e17e545a15cb5ca25d0ff0e9bfb21477ed2cc146dbbdc731a17182108"}
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.536226 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75958fc765-m6xxk"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.542195 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d3e21f7d-50b3-48f5-8623-a85830778ab7","Type":"ContainerStarted","Data":"9a25ffd6a6f44f6c52cf49e79c041f5470d9977b97c845ee288b5b4345119c64"}
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.542241 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.542412 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-log" containerID="cri-o://49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f" gracePeriod=30
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.542780 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-api" containerID="cri-o://f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566" gracePeriod=30
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.543048 4652 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.543352 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sn7x\" (UniqueName: \"kubernetes.io/projected/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-kube-api-access-2sn7x\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.543378 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.543395 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22904cb7-2e5f-4cc9-b509-3f16170f8e3f-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.570408 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.204:8774/\": EOF"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.570639 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.204:8774/\": EOF"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.575779 4652 scope.go:117] "RemoveContainer" containerID="66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.592936 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.608909 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.609624 4652 scope.go:117] "RemoveContainer" containerID="c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367"
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.611145 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367\": container with ID starting with c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367 not found: ID does not exist" containerID="c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.611201 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367"} err="failed to get container status \"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367\": rpc error: code = NotFound desc = could not find container \"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367\": container with ID starting with c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367 not found: ID does not exist"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.611234 4652 scope.go:117] "RemoveContainer" containerID="66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4"
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.612358 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4\": container with ID starting with 66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4 not found: ID does not exist" containerID="66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.612388 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4"} err="failed to get container status \"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4\": rpc error: code = NotFound desc = could not find container \"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4\": container with ID starting with 66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4 not found: ID does not exist"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.612407 4652 scope.go:117] "RemoveContainer" containerID="c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.612691 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367"} err="failed to get container status \"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367\": rpc error: code = NotFound desc = could not find container \"c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367\": container with ID starting with c436b1e54f4dc6a9353835bd8906c42fe3a5438cbb5ae7dbab45ff30b00fc367 not found: ID does not exist"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.612717 4652 scope.go:117] "RemoveContainer" containerID="66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.612959 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4"} err="failed to get container status \"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4\": rpc error: code = NotFound desc = could not find container \"66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4\": container with ID starting with 66760117921ef3c47213d2089e11507b76fc04ad5cc5a98e8ea89679fd84d6c4 not found: ID does not exist"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.612982 4652 scope.go:117] "RemoveContainer" containerID="10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.616489 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.617017 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" containerName="init"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617036 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" containerName="init"
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.617061 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" containerName="dnsmasq-dns"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617070 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" containerName="dnsmasq-dns"
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.617084 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerName="nova-metadata-log"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617091 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerName="nova-metadata-log"
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.617117 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerName="nova-metadata-metadata"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617123 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerName="nova-metadata-metadata"
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.617134 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f49d0a16-f608-4d69-af94-2e84fc4dee10" containerName="nova-manage"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617140 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f49d0a16-f608-4d69-af94-2e84fc4dee10" containerName="nova-manage"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617353 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f49d0a16-f608-4d69-af94-2e84fc4dee10" containerName="nova-manage"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617383 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerName="nova-metadata-metadata"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617394 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" containerName="nova-metadata-log"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.617403 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" containerName="dnsmasq-dns"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.618546 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.620592 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.620821 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.625396 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.625472 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.625445223 podStartE2EDuration="2.625445223s" podCreationTimestamp="2025-12-05 05:46:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:18.605216227 +0000 UTC m=+1180.841946493" watchObservedRunningTime="2025-12-05 05:46:18.625445223 +0000 UTC m=+1180.862175490"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.641171 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-m6xxk"]
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.646073 4652 scope.go:117] "RemoveContainer" containerID="8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.651426 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75958fc765-m6xxk"]
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.672777 4652 scope.go:117] "RemoveContainer" containerID="10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4"
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.673434 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4\": container with ID starting with 10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4 not found: ID does not exist" containerID="10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.673472 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4"} err="failed to get container status \"10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4\": rpc error: code = NotFound desc = could not find container \"10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4\": container with ID starting with 10fe691a42c74999fe7124e68dff2cb831cd8858e292fc726fdcf68b703d82d4 not found: ID does not exist"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.673494 4652 scope.go:117] "RemoveContainer" containerID="8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3"
Dec 05 05:46:18 crc kubenswrapper[4652]: E1205 05:46:18.674408 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3\": container with ID starting with 8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3 not found: ID does not exist" containerID="8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.674447 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3"} err="failed to get container status \"8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3\": rpc error: code = NotFound desc = could not find container \"8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3\": container with ID starting with 8c108e7b558217d25fc341e8e0d92bbeb056eeca660acd7cc3dd1f7f847ec3f3 not found: ID does not exist"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.750056 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5583554b-9fea-4104-a8a7-91e2151b3f45-logs\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.750102 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.750253 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-config-data\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.750400 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtk87\" (UniqueName: \"kubernetes.io/projected/5583554b-9fea-4104-a8a7-91e2151b3f45-kube-api-access-rtk87\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.750529 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.852747 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5583554b-9fea-4104-a8a7-91e2151b3f45-logs\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.852802 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.852874 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-config-data\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.852915 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtk87\" (UniqueName: \"kubernetes.io/projected/5583554b-9fea-4104-a8a7-91e2151b3f45-kube-api-access-rtk87\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.852950 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.853796 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5583554b-9fea-4104-a8a7-91e2151b3f45-logs\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.857076 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.860737 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.865446 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-config-data\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.867916 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtk87\" (UniqueName: \"kubernetes.io/projected/5583554b-9fea-4104-a8a7-91e2151b3f45-kube-api-access-rtk87\") pod \"nova-metadata-0\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " pod="openstack/nova-metadata-0"
Dec 05 05:46:18 crc kubenswrapper[4652]: I1205 05:46:18.938880 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 05:46:19 crc kubenswrapper[4652]: I1205 05:46:19.325661 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 05:46:19 crc kubenswrapper[4652]: W1205 05:46:19.339105 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5583554b_9fea_4104_a8a7_91e2151b3f45.slice/crio-1b87127603dcdac8ae6a2b4f901237ad015323e24ffb81abbd21782b8d7e96f4 WatchSource:0}: Error finding container 1b87127603dcdac8ae6a2b4f901237ad015323e24ffb81abbd21782b8d7e96f4: Status 404 returned error can't find the container with id 1b87127603dcdac8ae6a2b4f901237ad015323e24ffb81abbd21782b8d7e96f4
Dec 05 05:46:19 crc kubenswrapper[4652]: I1205 05:46:19.561591 4652 generic.go:334] "Generic (PLEG): container finished" podID="81712ea8-c61d-4966-b51f-643b52063167" containerID="49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f" exitCode=143
Dec 05 05:46:19 crc kubenswrapper[4652]: I1205 05:46:19.561778 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81712ea8-c61d-4966-b51f-643b52063167","Type":"ContainerDied","Data":"49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f"}
Dec 05 05:46:19 crc kubenswrapper[4652]: I1205 05:46:19.563440 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d07e49ea-3586-42bb-9af9-0f68d9823fec" containerName="nova-scheduler-scheduler" containerID="cri-o://c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c" gracePeriod=30
Dec 05 05:46:19 crc kubenswrapper[4652]: I1205 05:46:19.563538 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5583554b-9fea-4104-a8a7-91e2151b3f45","Type":"ContainerStarted","Data":"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06"}
Dec 05 05:46:19 crc kubenswrapper[4652]: I1205 05:46:19.563581 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5583554b-9fea-4104-a8a7-91e2151b3f45","Type":"ContainerStarted","Data":"1b87127603dcdac8ae6a2b4f901237ad015323e24ffb81abbd21782b8d7e96f4"}
Dec 05 05:46:20 crc kubenswrapper[4652]: I1205 05:46:20.134714 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22904cb7-2e5f-4cc9-b509-3f16170f8e3f" path="/var/lib/kubelet/pods/22904cb7-2e5f-4cc9-b509-3f16170f8e3f/volumes"
Dec 05 05:46:20 crc kubenswrapper[4652]: I1205 05:46:20.135278 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f6dcbe4-f415-43ec-a986-746cdcdeba2b" path="/var/lib/kubelet/pods/9f6dcbe4-f415-43ec-a986-746cdcdeba2b/volumes"
Dec 05 05:46:20 crc kubenswrapper[4652]: I1205 05:46:20.571243 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5583554b-9fea-4104-a8a7-91e2151b3f45","Type":"ContainerStarted","Data":"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8"}
Dec 05 05:46:20 crc kubenswrapper[4652]: I1205 05:46:20.595330 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.59531035 podStartE2EDuration="2.59531035s" podCreationTimestamp="2025-12-05 05:46:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:20.58358221 +0000 UTC m=+1182.820312477" watchObservedRunningTime="2025-12-05 05:46:20.59531035 +0000 UTC m=+1182.832040617"
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.031684 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.132983 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81712ea8-c61d-4966-b51f-643b52063167-logs\") pod \"81712ea8-c61d-4966-b51f-643b52063167\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") "
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.133093 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmrtp\" (UniqueName: \"kubernetes.io/projected/81712ea8-c61d-4966-b51f-643b52063167-kube-api-access-cmrtp\") pod \"81712ea8-c61d-4966-b51f-643b52063167\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") "
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.133151 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-config-data\") pod \"81712ea8-c61d-4966-b51f-643b52063167\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") "
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.133267 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-combined-ca-bundle\") pod \"81712ea8-c61d-4966-b51f-643b52063167\" (UID: \"81712ea8-c61d-4966-b51f-643b52063167\") "
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.133462 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81712ea8-c61d-4966-b51f-643b52063167-logs" (OuterVolumeSpecName: "logs") pod "81712ea8-c61d-4966-b51f-643b52063167" (UID: "81712ea8-c61d-4966-b51f-643b52063167"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.133924 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81712ea8-c61d-4966-b51f-643b52063167-logs\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.138645 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81712ea8-c61d-4966-b51f-643b52063167-kube-api-access-cmrtp" (OuterVolumeSpecName: "kube-api-access-cmrtp") pod "81712ea8-c61d-4966-b51f-643b52063167" (UID: "81712ea8-c61d-4966-b51f-643b52063167"). InnerVolumeSpecName "kube-api-access-cmrtp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.162049 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81712ea8-c61d-4966-b51f-643b52063167" (UID: "81712ea8-c61d-4966-b51f-643b52063167"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.162775 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-config-data" (OuterVolumeSpecName: "config-data") pod "81712ea8-c61d-4966-b51f-643b52063167" (UID: "81712ea8-c61d-4966-b51f-643b52063167"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.235873 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmrtp\" (UniqueName: \"kubernetes.io/projected/81712ea8-c61d-4966-b51f-643b52063167-kube-api-access-cmrtp\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.236051 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.236107 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81712ea8-c61d-4966-b51f-643b52063167-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:46:22 crc kubenswrapper[4652]: E1205 05:46:22.341252 4652 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 05:46:22 crc kubenswrapper[4652]: E1205 05:46:22.342688 4652 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 05:46:22 crc kubenswrapper[4652]: E1205 05:46:22.348836 4652 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 05:46:22 crc kubenswrapper[4652]: E1205 05:46:22.348970 4652 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d07e49ea-3586-42bb-9af9-0f68d9823fec" containerName="nova-scheduler-scheduler"
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.589206 4652 generic.go:334] "Generic (PLEG): container finished" podID="81712ea8-c61d-4966-b51f-643b52063167" containerID="f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566" exitCode=0
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.589246 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81712ea8-c61d-4966-b51f-643b52063167","Type":"ContainerDied","Data":"f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566"}
Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.589255 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.589272 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81712ea8-c61d-4966-b51f-643b52063167","Type":"ContainerDied","Data":"1575284420f91a5d731b4fab804eeba4b0d2bc45a471abd8b59844ad09efcb5a"} Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.589290 4652 scope.go:117] "RemoveContainer" containerID="f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.612377 4652 scope.go:117] "RemoveContainer" containerID="49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.624844 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.637646 4652 scope.go:117] "RemoveContainer" containerID="f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566" Dec 05 05:46:22 crc kubenswrapper[4652]: E1205 05:46:22.638007 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566\": container with ID starting with f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566 not found: ID does not exist" containerID="f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.638053 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566"} err="failed to get container status \"f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566\": rpc error: code = NotFound desc = could not find container \"f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566\": container with ID starting with f35992e95c4f3e5d9c2c41db68d8c31ba1ab0834d5fc9bcd32acfcee98238566 not found: ID does not exist" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.638089 4652 scope.go:117] "RemoveContainer" containerID="49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f" Dec 05 05:46:22 crc kubenswrapper[4652]: E1205 05:46:22.638401 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f\": container with ID starting with 49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f not found: ID does not exist" containerID="49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.638431 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f"} err="failed to get container status \"49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f\": rpc error: code = NotFound desc = could not find container \"49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f\": container with ID starting with 49c4754414e53404e2eb1954e834503b227b707d11f2dfe61ec16a709c64e78f not found: ID does not exist" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.642135 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.651274 4652 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:22 crc kubenswrapper[4652]: E1205 05:46:22.651814 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-log" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.651828 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-log" Dec 05 05:46:22 crc kubenswrapper[4652]: E1205 05:46:22.651858 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-api" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.651867 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-api" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.652065 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-api" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.652085 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="81712ea8-c61d-4966-b51f-643b52063167" containerName="nova-api-log" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.653781 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.660859 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.664044 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.749171 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-config-data\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.749242 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.749347 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1266ba95-93bf-4fd0-8b3d-98309d61e84a-logs\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.749427 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p79ns\" (UniqueName: \"kubernetes.io/projected/1266ba95-93bf-4fd0-8b3d-98309d61e84a-kube-api-access-p79ns\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.851178 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1266ba95-93bf-4fd0-8b3d-98309d61e84a-logs\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc 
kubenswrapper[4652]: I1205 05:46:22.851394 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p79ns\" (UniqueName: \"kubernetes.io/projected/1266ba95-93bf-4fd0-8b3d-98309d61e84a-kube-api-access-p79ns\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.851569 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1266ba95-93bf-4fd0-8b3d-98309d61e84a-logs\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.851587 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-config-data\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.851711 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.858314 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-config-data\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.858437 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.870665 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p79ns\" (UniqueName: \"kubernetes.io/projected/1266ba95-93bf-4fd0-8b3d-98309d61e84a-kube-api-access-p79ns\") pod \"nova-api-0\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " pod="openstack/nova-api-0" Dec 05 05:46:22 crc kubenswrapper[4652]: I1205 05:46:22.968913 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.063245 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.156197 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grst5\" (UniqueName: \"kubernetes.io/projected/d07e49ea-3586-42bb-9af9-0f68d9823fec-kube-api-access-grst5\") pod \"d07e49ea-3586-42bb-9af9-0f68d9823fec\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.156538 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-config-data\") pod \"d07e49ea-3586-42bb-9af9-0f68d9823fec\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.156655 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-combined-ca-bundle\") pod \"d07e49ea-3586-42bb-9af9-0f68d9823fec\" (UID: \"d07e49ea-3586-42bb-9af9-0f68d9823fec\") " Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.165766 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d07e49ea-3586-42bb-9af9-0f68d9823fec-kube-api-access-grst5" (OuterVolumeSpecName: "kube-api-access-grst5") pod "d07e49ea-3586-42bb-9af9-0f68d9823fec" (UID: "d07e49ea-3586-42bb-9af9-0f68d9823fec"). InnerVolumeSpecName "kube-api-access-grst5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.180809 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-config-data" (OuterVolumeSpecName: "config-data") pod "d07e49ea-3586-42bb-9af9-0f68d9823fec" (UID: "d07e49ea-3586-42bb-9af9-0f68d9823fec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.181494 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d07e49ea-3586-42bb-9af9-0f68d9823fec" (UID: "d07e49ea-3586-42bb-9af9-0f68d9823fec"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.261963 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.262010 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grst5\" (UniqueName: \"kubernetes.io/projected/d07e49ea-3586-42bb-9af9-0f68d9823fec-kube-api-access-grst5\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.262024 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d07e49ea-3586-42bb-9af9-0f68d9823fec-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:23 crc kubenswrapper[4652]: W1205 05:46:23.365717 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1266ba95_93bf_4fd0_8b3d_98309d61e84a.slice/crio-27324218c0e7cb2790da2b091e7def46c28aa8b5651e2c4c056a99452064fe1e WatchSource:0}: Error finding container 27324218c0e7cb2790da2b091e7def46c28aa8b5651e2c4c056a99452064fe1e: Status 404 returned error can't find the container with id 27324218c0e7cb2790da2b091e7def46c28aa8b5651e2c4c056a99452064fe1e Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.365842 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.598632 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1266ba95-93bf-4fd0-8b3d-98309d61e84a","Type":"ContainerStarted","Data":"19e95d3ae5c42abc47efe4f1eb7af36ca61230f125c6db1d35e269beea7a8759"} Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.598672 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1266ba95-93bf-4fd0-8b3d-98309d61e84a","Type":"ContainerStarted","Data":"27324218c0e7cb2790da2b091e7def46c28aa8b5651e2c4c056a99452064fe1e"} Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.601495 4652 generic.go:334] "Generic (PLEG): container finished" podID="d07e49ea-3586-42bb-9af9-0f68d9823fec" containerID="c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c" exitCode=0 Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.601528 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d07e49ea-3586-42bb-9af9-0f68d9823fec","Type":"ContainerDied","Data":"c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c"} Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.601581 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d07e49ea-3586-42bb-9af9-0f68d9823fec","Type":"ContainerDied","Data":"d77b17d75b8ce833cc3dd1612ebb926379e81ebeb6f0068f5a8e5d602c163d03"} Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.601580 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.601594 4652 scope.go:117] "RemoveContainer" containerID="c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.615967 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.615950663 podStartE2EDuration="1.615950663s" podCreationTimestamp="2025-12-05 05:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:23.610950626 +0000 UTC m=+1185.847680893" watchObservedRunningTime="2025-12-05 05:46:23.615950663 +0000 UTC m=+1185.852680930" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.619976 4652 scope.go:117] "RemoveContainer" containerID="c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c" Dec 05 05:46:23 crc kubenswrapper[4652]: E1205 05:46:23.620262 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c\": container with ID starting with c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c not found: ID does not exist" containerID="c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.620298 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c"} err="failed to get container status \"c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c\": rpc error: code = NotFound desc = could not find container \"c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c\": container with ID starting with c76916070a94eef739313d8e818a0a22abc25db746a2d0fc43da7e2050d68f3c not found: ID does not exist" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.634725 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.645488 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.652881 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:46:23 crc kubenswrapper[4652]: E1205 05:46:23.653264 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d07e49ea-3586-42bb-9af9-0f68d9823fec" containerName="nova-scheduler-scheduler" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.653283 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d07e49ea-3586-42bb-9af9-0f68d9823fec" containerName="nova-scheduler-scheduler" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.653479 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d07e49ea-3586-42bb-9af9-0f68d9823fec" containerName="nova-scheduler-scheduler" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.654175 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.657661 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.668274 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.771350 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-config-data\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.771514 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2d42\" (UniqueName: \"kubernetes.io/projected/51fc5aa7-5723-4983-8e88-6b96fa157fff-kube-api-access-h2d42\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.771645 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.873644 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-config-data\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.873695 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2d42\" (UniqueName: \"kubernetes.io/projected/51fc5aa7-5723-4983-8e88-6b96fa157fff-kube-api-access-h2d42\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.873732 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.877762 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-config-data\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.877952 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.887431 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2d42\" (UniqueName: 
\"kubernetes.io/projected/51fc5aa7-5723-4983-8e88-6b96fa157fff-kube-api-access-h2d42\") pod \"nova-scheduler-0\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") " pod="openstack/nova-scheduler-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.939907 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.939955 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 05:46:23 crc kubenswrapper[4652]: I1205 05:46:23.972205 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 05:46:24 crc kubenswrapper[4652]: I1205 05:46:24.134651 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81712ea8-c61d-4966-b51f-643b52063167" path="/var/lib/kubelet/pods/81712ea8-c61d-4966-b51f-643b52063167/volumes" Dec 05 05:46:24 crc kubenswrapper[4652]: I1205 05:46:24.135509 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d07e49ea-3586-42bb-9af9-0f68d9823fec" path="/var/lib/kubelet/pods/d07e49ea-3586-42bb-9af9-0f68d9823fec/volumes" Dec 05 05:46:24 crc kubenswrapper[4652]: I1205 05:46:24.366072 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:46:24 crc kubenswrapper[4652]: W1205 05:46:24.366244 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51fc5aa7_5723_4983_8e88_6b96fa157fff.slice/crio-7c6bf352e481f31ea1ad69d96a86abea6fc29272743f4011edf1f0ef80bf3562 WatchSource:0}: Error finding container 7c6bf352e481f31ea1ad69d96a86abea6fc29272743f4011edf1f0ef80bf3562: Status 404 returned error can't find the container with id 7c6bf352e481f31ea1ad69d96a86abea6fc29272743f4011edf1f0ef80bf3562 Dec 05 05:46:24 crc kubenswrapper[4652]: I1205 05:46:24.612345 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1266ba95-93bf-4fd0-8b3d-98309d61e84a","Type":"ContainerStarted","Data":"60c3574247f08834e4d120fb016a4f89aaabf9e719ce8faed3530537b0d72de7"} Dec 05 05:46:24 crc kubenswrapper[4652]: I1205 05:46:24.614278 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"51fc5aa7-5723-4983-8e88-6b96fa157fff","Type":"ContainerStarted","Data":"14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025"} Dec 05 05:46:24 crc kubenswrapper[4652]: I1205 05:46:24.614321 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"51fc5aa7-5723-4983-8e88-6b96fa157fff","Type":"ContainerStarted","Data":"7c6bf352e481f31ea1ad69d96a86abea6fc29272743f4011edf1f0ef80bf3562"} Dec 05 05:46:24 crc kubenswrapper[4652]: I1205 05:46:24.627662 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.627643312 podStartE2EDuration="1.627643312s" podCreationTimestamp="2025-12-05 05:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:24.626213665 +0000 UTC m=+1186.862943933" watchObservedRunningTime="2025-12-05 05:46:24.627643312 +0000 UTC m=+1186.864373579" Dec 05 05:46:26 crc kubenswrapper[4652]: I1205 05:46:26.915706 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 05:46:28 crc 
kubenswrapper[4652]: I1205 05:46:28.939590 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 05:46:28 crc kubenswrapper[4652]: I1205 05:46:28.939793 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 05:46:28 crc kubenswrapper[4652]: I1205 05:46:28.972467 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 05:46:29 crc kubenswrapper[4652]: I1205 05:46:29.950685 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 05:46:29 crc kubenswrapper[4652]: I1205 05:46:29.950702 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 05:46:30 crc kubenswrapper[4652]: I1205 05:46:30.492594 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 05:46:32 crc kubenswrapper[4652]: I1205 05:46:32.969044 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 05:46:32 crc kubenswrapper[4652]: I1205 05:46:32.969265 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 05:46:33 crc kubenswrapper[4652]: I1205 05:46:33.544412 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 05:46:33 crc kubenswrapper[4652]: I1205 05:46:33.544813 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="d88dcba1-b955-4ada-b5ae-c98555a02260" containerName="kube-state-metrics" containerID="cri-o://a517707eef52d14b8a88bb8fd7338af132eed0cb2f4cc6e50683d2f58cbb2713" gracePeriod=30 Dec 05 05:46:33 crc kubenswrapper[4652]: I1205 05:46:33.688779 4652 generic.go:334] "Generic (PLEG): container finished" podID="d88dcba1-b955-4ada-b5ae-c98555a02260" containerID="a517707eef52d14b8a88bb8fd7338af132eed0cb2f4cc6e50683d2f58cbb2713" exitCode=2 Dec 05 05:46:33 crc kubenswrapper[4652]: I1205 05:46:33.688816 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d88dcba1-b955-4ada-b5ae-c98555a02260","Type":"ContainerDied","Data":"a517707eef52d14b8a88bb8fd7338af132eed0cb2f4cc6e50683d2f58cbb2713"} Dec 05 05:46:33 crc kubenswrapper[4652]: I1205 05:46:33.972511 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 05:46:33 crc kubenswrapper[4652]: I1205 05:46:33.973761 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.011738 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.052712 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.052793 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.160459 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmzvx\" (UniqueName: \"kubernetes.io/projected/d88dcba1-b955-4ada-b5ae-c98555a02260-kube-api-access-vmzvx\") pod \"d88dcba1-b955-4ada-b5ae-c98555a02260\" (UID: \"d88dcba1-b955-4ada-b5ae-c98555a02260\") " Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.167766 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d88dcba1-b955-4ada-b5ae-c98555a02260-kube-api-access-vmzvx" (OuterVolumeSpecName: "kube-api-access-vmzvx") pod "d88dcba1-b955-4ada-b5ae-c98555a02260" (UID: "d88dcba1-b955-4ada-b5ae-c98555a02260"). InnerVolumeSpecName "kube-api-access-vmzvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.263203 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmzvx\" (UniqueName: \"kubernetes.io/projected/d88dcba1-b955-4ada-b5ae-c98555a02260-kube-api-access-vmzvx\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.699114 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d88dcba1-b955-4ada-b5ae-c98555a02260","Type":"ContainerDied","Data":"cb2edbf86e11ca5583760eddbb81ebf39674fa25b097c9da7ce58cab3c574abe"} Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.699151 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.699167 4652 scope.go:117] "RemoveContainer" containerID="a517707eef52d14b8a88bb8fd7338af132eed0cb2f4cc6e50683d2f58cbb2713" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.734982 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.738704 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.755637 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.766104 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 05:46:34 crc kubenswrapper[4652]: E1205 05:46:34.766573 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d88dcba1-b955-4ada-b5ae-c98555a02260" containerName="kube-state-metrics" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.766587 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d88dcba1-b955-4ada-b5ae-c98555a02260" containerName="kube-state-metrics" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.766851 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d88dcba1-b955-4ada-b5ae-c98555a02260" containerName="kube-state-metrics" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.767569 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.769532 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.769639 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.777322 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.873505 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkjc6\" (UniqueName: \"kubernetes.io/projected/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-api-access-qkjc6\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.873848 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.874094 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.874317 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.976715 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.976756 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.976826 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkjc6\" (UniqueName: \"kubernetes.io/projected/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-api-access-qkjc6\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.976864 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.980095 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.986070 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:34 crc kubenswrapper[4652]: I1205 05:46:34.997715 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkjc6\" (UniqueName: \"kubernetes.io/projected/36542228-ef73-49ec-ae3e-471911ca4fcc-kube-api-access-qkjc6\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.001661 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36542228-ef73-49ec-ae3e-471911ca4fcc-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"36542228-ef73-49ec-ae3e-471911ca4fcc\") " pod="openstack/kube-state-metrics-0" Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.089091 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.562641 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.605888 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.606133 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="ceilometer-central-agent" containerID="cri-o://7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7" gracePeriod=30 Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.606250 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="ceilometer-notification-agent" containerID="cri-o://d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2" gracePeriod=30 Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.606292 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="sg-core" containerID="cri-o://c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b" gracePeriod=30 Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.606382 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="proxy-httpd" containerID="cri-o://8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236" gracePeriod=30 Dec 05 05:46:35 crc kubenswrapper[4652]: I1205 05:46:35.710483 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"36542228-ef73-49ec-ae3e-471911ca4fcc","Type":"ContainerStarted","Data":"bb09ee25eb1c622b181c5dc5b97e2d29b1f837a4fa3b7a636757f18512a0e721"} Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.134435 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d88dcba1-b955-4ada-b5ae-c98555a02260" path="/var/lib/kubelet/pods/d88dcba1-b955-4ada-b5ae-c98555a02260/volumes" Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.720505 4652 generic.go:334] "Generic (PLEG): container finished" podID="e50d66ad-cabe-4917-a690-145be1633551" containerID="8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236" exitCode=0 Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.720543 4652 generic.go:334] "Generic (PLEG): container finished" podID="e50d66ad-cabe-4917-a690-145be1633551" containerID="c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b" exitCode=2 Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.720565 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerDied","Data":"8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236"} Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.720589 4652 generic.go:334] "Generic (PLEG): container finished" podID="e50d66ad-cabe-4917-a690-145be1633551" containerID="7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7" exitCode=0 Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.720603 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerDied","Data":"c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b"} Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.720618 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerDied","Data":"7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7"} Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.722122 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"36542228-ef73-49ec-ae3e-471911ca4fcc","Type":"ContainerStarted","Data":"032b237ed9cccb9195654cf8dd62f55b1f711e41e1972410885b71530a7cab5e"} Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.722263 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 05:46:36 crc kubenswrapper[4652]: I1205 05:46:36.738112 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.384151637 podStartE2EDuration="2.738095892s" podCreationTimestamp="2025-12-05 05:46:34 +0000 UTC" firstStartedPulling="2025-12-05 05:46:35.562646165 +0000 UTC m=+1197.799376432" lastFinishedPulling="2025-12-05 05:46:35.91659042 +0000 UTC m=+1198.153320687" observedRunningTime="2025-12-05 05:46:36.733671676 +0000 UTC m=+1198.970401944" watchObservedRunningTime="2025-12-05 05:46:36.738095892 +0000 UTC m=+1198.974826158" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.240046 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.342779 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-run-httpd\") pod \"e50d66ad-cabe-4917-a690-145be1633551\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.342888 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-combined-ca-bundle\") pod \"e50d66ad-cabe-4917-a690-145be1633551\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.343090 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-scripts\") pod \"e50d66ad-cabe-4917-a690-145be1633551\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.343107 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e50d66ad-cabe-4917-a690-145be1633551" (UID: "e50d66ad-cabe-4917-a690-145be1633551"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.343153 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-sg-core-conf-yaml\") pod \"e50d66ad-cabe-4917-a690-145be1633551\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.343334 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-config-data\") pod \"e50d66ad-cabe-4917-a690-145be1633551\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.343368 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-log-httpd\") pod \"e50d66ad-cabe-4917-a690-145be1633551\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.343616 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xj2vq\" (UniqueName: \"kubernetes.io/projected/e50d66ad-cabe-4917-a690-145be1633551-kube-api-access-xj2vq\") pod \"e50d66ad-cabe-4917-a690-145be1633551\" (UID: \"e50d66ad-cabe-4917-a690-145be1633551\") " Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.343852 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e50d66ad-cabe-4917-a690-145be1633551" (UID: "e50d66ad-cabe-4917-a690-145be1633551"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.344903 4652 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.344921 4652 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e50d66ad-cabe-4917-a690-145be1633551-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.349082 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-scripts" (OuterVolumeSpecName: "scripts") pod "e50d66ad-cabe-4917-a690-145be1633551" (UID: "e50d66ad-cabe-4917-a690-145be1633551"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.349964 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e50d66ad-cabe-4917-a690-145be1633551-kube-api-access-xj2vq" (OuterVolumeSpecName: "kube-api-access-xj2vq") pod "e50d66ad-cabe-4917-a690-145be1633551" (UID: "e50d66ad-cabe-4917-a690-145be1633551"). InnerVolumeSpecName "kube-api-access-xj2vq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.375852 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e50d66ad-cabe-4917-a690-145be1633551" (UID: "e50d66ad-cabe-4917-a690-145be1633551"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.427826 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-config-data" (OuterVolumeSpecName: "config-data") pod "e50d66ad-cabe-4917-a690-145be1633551" (UID: "e50d66ad-cabe-4917-a690-145be1633551"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.433528 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e50d66ad-cabe-4917-a690-145be1633551" (UID: "e50d66ad-cabe-4917-a690-145be1633551"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.447969 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.447993 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.448003 4652 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.448014 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e50d66ad-cabe-4917-a690-145be1633551-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.448024 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xj2vq\" (UniqueName: \"kubernetes.io/projected/e50d66ad-cabe-4917-a690-145be1633551-kube-api-access-xj2vq\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.742883 4652 generic.go:334] "Generic (PLEG): container finished" podID="e50d66ad-cabe-4917-a690-145be1633551" containerID="d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2" exitCode=0 Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.742966 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.742979 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerDied","Data":"d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2"} Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.743406 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e50d66ad-cabe-4917-a690-145be1633551","Type":"ContainerDied","Data":"44a01b34264039a3490b2b79082488cde693f1bfdfc27c5b99270efcaad1f212"} Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.743425 4652 scope.go:117] "RemoveContainer" containerID="8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.767208 4652 scope.go:117] "RemoveContainer" containerID="c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.776782 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.789627 4652 scope.go:117] "RemoveContainer" containerID="d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.789764 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802125 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:38 crc kubenswrapper[4652]: E1205 05:46:38.802593 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="proxy-httpd" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802611 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="proxy-httpd" Dec 05 05:46:38 crc kubenswrapper[4652]: E1205 05:46:38.802634 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="ceilometer-notification-agent" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802641 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="ceilometer-notification-agent" Dec 05 05:46:38 crc kubenswrapper[4652]: E1205 05:46:38.802658 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="ceilometer-central-agent" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802663 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="ceilometer-central-agent" Dec 05 05:46:38 crc kubenswrapper[4652]: E1205 05:46:38.802680 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="sg-core" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802685 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="sg-core" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802864 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="proxy-httpd" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802881 4652 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="ceilometer-central-agent" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802889 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="sg-core" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.802916 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e50d66ad-cabe-4917-a690-145be1633551" containerName="ceilometer-notification-agent" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.804581 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.806809 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.813991 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.814296 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.822408 4652 scope.go:117] "RemoveContainer" containerID="7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.834832 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.848470 4652 scope.go:117] "RemoveContainer" containerID="8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236" Dec 05 05:46:38 crc kubenswrapper[4652]: E1205 05:46:38.848929 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236\": container with ID starting with 8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236 not found: ID does not exist" containerID="8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.848967 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236"} err="failed to get container status \"8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236\": rpc error: code = NotFound desc = could not find container \"8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236\": container with ID starting with 8d01047b1ec1a7cf758ee218a5c290d6a1982f989f96458a8edba4f9e9680236 not found: ID does not exist" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.848991 4652 scope.go:117] "RemoveContainer" containerID="c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b" Dec 05 05:46:38 crc kubenswrapper[4652]: E1205 05:46:38.849393 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b\": container with ID starting with c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b not found: ID does not exist" containerID="c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.849422 4652 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b"} err="failed to get container status \"c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b\": rpc error: code = NotFound desc = could not find container \"c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b\": container with ID starting with c3efd016f7a631dc45c2028c98733aa77ab1bc080c9963d06d886dd29e41609b not found: ID does not exist" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.849454 4652 scope.go:117] "RemoveContainer" containerID="d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2" Dec 05 05:46:38 crc kubenswrapper[4652]: E1205 05:46:38.849746 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2\": container with ID starting with d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2 not found: ID does not exist" containerID="d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.849775 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2"} err="failed to get container status \"d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2\": rpc error: code = NotFound desc = could not find container \"d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2\": container with ID starting with d18b86866ebb69754005be1cd0dd99bbfb832f2d721fc8892d8512ed4d4593d2 not found: ID does not exist" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.849791 4652 scope.go:117] "RemoveContainer" containerID="7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7" Dec 05 05:46:38 crc kubenswrapper[4652]: E1205 05:46:38.850058 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7\": container with ID starting with 7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7 not found: ID does not exist" containerID="7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.850082 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7"} err="failed to get container status \"7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7\": rpc error: code = NotFound desc = could not find container \"7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7\": container with ID starting with 7b1cade84d2f4bca1492d0821f8f0c3ce4de034d029c00811ffe1d9cd3d7d2f7 not found: ID does not exist" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.944522 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.944609 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.948235 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.950082 4652 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.958566 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ljqm\" (UniqueName: \"kubernetes.io/projected/b48f991b-8da3-428d-bf21-90ca1ad92772-kube-api-access-4ljqm\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.958637 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.959195 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.959254 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-scripts\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.959323 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-config-data\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.959454 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-run-httpd\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.959904 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:38 crc kubenswrapper[4652]: I1205 05:46:38.960071 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-log-httpd\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.062451 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.062504 4652 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-scripts\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.062546 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-config-data\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.062602 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-run-httpd\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.062617 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.062631 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-log-httpd\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.062656 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ljqm\" (UniqueName: \"kubernetes.io/projected/b48f991b-8da3-428d-bf21-90ca1ad92772-kube-api-access-4ljqm\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.062685 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.063085 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-run-httpd\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.063293 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-log-httpd\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.066402 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.066924 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.066931 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.067480 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-scripts\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.067893 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-config-data\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.077527 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ljqm\" (UniqueName: \"kubernetes.io/projected/b48f991b-8da3-428d-bf21-90ca1ad92772-kube-api-access-4ljqm\") pod \"ceilometer-0\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.122792 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.521144 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:39 crc kubenswrapper[4652]: W1205 05:46:39.527148 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb48f991b_8da3_428d_bf21_90ca1ad92772.slice/crio-98d682c2f966fe44c93d9b712607c15a1cc52d93e31771a9d6db63c7121afeed WatchSource:0}: Error finding container 98d682c2f966fe44c93d9b712607c15a1cc52d93e31771a9d6db63c7121afeed: Status 404 returned error can't find the container with id 98d682c2f966fe44c93d9b712607c15a1cc52d93e31771a9d6db63c7121afeed Dec 05 05:46:39 crc kubenswrapper[4652]: I1205 05:46:39.753360 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerStarted","Data":"98d682c2f966fe44c93d9b712607c15a1cc52d93e31771a9d6db63c7121afeed"} Dec 05 05:46:40 crc kubenswrapper[4652]: I1205 05:46:40.134395 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e50d66ad-cabe-4917-a690-145be1633551" path="/var/lib/kubelet/pods/e50d66ad-cabe-4917-a690-145be1633551/volumes" Dec 05 05:46:40 crc kubenswrapper[4652]: I1205 05:46:40.761912 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerStarted","Data":"fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007"} Dec 05 05:46:41 crc kubenswrapper[4652]: I1205 05:46:41.775097 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerStarted","Data":"5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5"} Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.820755 4652 generic.go:334] "Generic (PLEG): container finished" podID="579de9cf-880a-49ee-a42d-6569627d4caa" containerID="1ae366a75281b020232cf9c323b30d4ae8bf6d5ef8ee1f4b804863da2f55d2a9" exitCode=137 Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.821202 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"579de9cf-880a-49ee-a42d-6569627d4caa","Type":"ContainerDied","Data":"1ae366a75281b020232cf9c323b30d4ae8bf6d5ef8ee1f4b804863da2f55d2a9"} Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.821228 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"579de9cf-880a-49ee-a42d-6569627d4caa","Type":"ContainerDied","Data":"495c8064b3554c76d22e94f3856c4d6df09329c6a49eb4c42dc473dc1829d3f3"} Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.821238 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="495c8064b3554c76d22e94f3856c4d6df09329c6a49eb4c42dc473dc1829d3f3" Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.824282 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerStarted","Data":"1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656"} Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.865042 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.980829 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.981230 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 05:46:42 crc kubenswrapper[4652]: I1205 05:46:42.985825 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.012215 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.036185 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-combined-ca-bundle\") pod \"579de9cf-880a-49ee-a42d-6569627d4caa\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.036319 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-config-data\") pod \"579de9cf-880a-49ee-a42d-6569627d4caa\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.036391 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kr7g\" (UniqueName: \"kubernetes.io/projected/579de9cf-880a-49ee-a42d-6569627d4caa-kube-api-access-6kr7g\") pod \"579de9cf-880a-49ee-a42d-6569627d4caa\" (UID: \"579de9cf-880a-49ee-a42d-6569627d4caa\") " Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.041176 4652 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/579de9cf-880a-49ee-a42d-6569627d4caa-kube-api-access-6kr7g" (OuterVolumeSpecName: "kube-api-access-6kr7g") pod "579de9cf-880a-49ee-a42d-6569627d4caa" (UID: "579de9cf-880a-49ee-a42d-6569627d4caa"). InnerVolumeSpecName "kube-api-access-6kr7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.060733 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "579de9cf-880a-49ee-a42d-6569627d4caa" (UID: "579de9cf-880a-49ee-a42d-6569627d4caa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.062731 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-config-data" (OuterVolumeSpecName: "config-data") pod "579de9cf-880a-49ee-a42d-6569627d4caa" (UID: "579de9cf-880a-49ee-a42d-6569627d4caa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.138625 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.138650 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/579de9cf-880a-49ee-a42d-6569627d4caa-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.138659 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kr7g\" (UniqueName: \"kubernetes.io/projected/579de9cf-880a-49ee-a42d-6569627d4caa-kube-api-access-6kr7g\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.833513 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerStarted","Data":"70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d"} Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.833771 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.833538 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.841047 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.853967 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.341177703 podStartE2EDuration="5.85395615s" podCreationTimestamp="2025-12-05 05:46:38 +0000 UTC" firstStartedPulling="2025-12-05 05:46:39.529611411 +0000 UTC m=+1201.766341678" lastFinishedPulling="2025-12-05 05:46:43.042389858 +0000 UTC m=+1205.279120125" observedRunningTime="2025-12-05 05:46:43.84754977 +0000 UTC m=+1206.084280036" watchObservedRunningTime="2025-12-05 05:46:43.85395615 +0000 UTC m=+1206.090686417" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.866871 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.873616 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.894335 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:43 crc kubenswrapper[4652]: E1205 05:46:43.894844 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="579de9cf-880a-49ee-a42d-6569627d4caa" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.894863 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="579de9cf-880a-49ee-a42d-6569627d4caa" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.895109 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="579de9cf-880a-49ee-a42d-6569627d4caa" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.895895 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.897657 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.903138 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.903766 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 05:46:43 crc kubenswrapper[4652]: I1205 05:46:43.903913 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.013629 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-bmqg6"] Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.015355 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.036283 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-bmqg6"] Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.058205 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.058397 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwfvz\" (UniqueName: \"kubernetes.io/projected/ed6932a0-b19b-405a-9266-f19d3c39ecae-kube-api-access-xwfvz\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.058444 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.058479 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.059460 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.137934 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="579de9cf-880a-49ee-a42d-6569627d4caa" path="/var/lib/kubelet/pods/579de9cf-880a-49ee-a42d-6569627d4caa/volumes" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161099 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwfvz\" (UniqueName: \"kubernetes.io/projected/ed6932a0-b19b-405a-9266-f19d3c39ecae-kube-api-access-xwfvz\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161172 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161238 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161266 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-sb\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161384 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161492 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161515 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-swift-storage-0\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161574 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-svc\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161705 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2flc4\" (UniqueName: \"kubernetes.io/projected/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-kube-api-access-2flc4\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161810 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-config\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.161897 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-nb\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.167529 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.170679 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.171143 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.172418 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed6932a0-b19b-405a-9266-f19d3c39ecae-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.197442 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwfvz\" (UniqueName: \"kubernetes.io/projected/ed6932a0-b19b-405a-9266-f19d3c39ecae-kube-api-access-xwfvz\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed6932a0-b19b-405a-9266-f19d3c39ecae\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.230793 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.264753 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2flc4\" (UniqueName: \"kubernetes.io/projected/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-kube-api-access-2flc4\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.264829 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-config\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.265669 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-config\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.266153 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-nb\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.266205 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-sb\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.266262 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-swift-storage-0\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.266294 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-svc\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.266913 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-svc\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.267382 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-nb\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.268145 
4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-sb\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.268971 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-swift-storage-0\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.280156 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2flc4\" (UniqueName: \"kubernetes.io/projected/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-kube-api-access-2flc4\") pod \"dnsmasq-dns-54599d8f7-bmqg6\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") " pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.352947 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:44 crc kubenswrapper[4652]: W1205 05:46:44.676800 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded6932a0_b19b_405a_9266_f19d3c39ecae.slice/crio-b12b2fe08ba670fa560f207a24131eeb1d73b8acb02a0f324476d9e9bab17552 WatchSource:0}: Error finding container b12b2fe08ba670fa560f207a24131eeb1d73b8acb02a0f324476d9e9bab17552: Status 404 returned error can't find the container with id b12b2fe08ba670fa560f207a24131eeb1d73b8acb02a0f324476d9e9bab17552 Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.680223 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.845091 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ed6932a0-b19b-405a-9266-f19d3c39ecae","Type":"ContainerStarted","Data":"b12b2fe08ba670fa560f207a24131eeb1d73b8acb02a0f324476d9e9bab17552"} Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.845390 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 05:46:44 crc kubenswrapper[4652]: I1205 05:46:44.876843 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-bmqg6"] Dec 05 05:46:44 crc kubenswrapper[4652]: W1205 05:46:44.878666 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2df2c65_9ffc_41ff_abcf_14b13aee5e97.slice/crio-238025f236b9bb0421cbaac6910a1a042faac4f05204000bc3b57847d69b4b72 WatchSource:0}: Error finding container 238025f236b9bb0421cbaac6910a1a042faac4f05204000bc3b57847d69b4b72: Status 404 returned error can't find the container with id 238025f236b9bb0421cbaac6910a1a042faac4f05204000bc3b57847d69b4b72 Dec 05 05:46:45 crc kubenswrapper[4652]: I1205 05:46:45.097244 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 05:46:45 crc kubenswrapper[4652]: I1205 05:46:45.853118 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
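Each volume above appears twice: a "MountVolume started" record and a "MountVolume.SetUp succeeded" record a few milliseconds later. To get per-volume mount latency out of a capture like this, pairing the two records by volume name is enough; a rough Go sketch, where the extraction regexes are this editor's guess at the message shape rather than anything kubelet guarantees:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"time"
)

var (
	ts      = regexp.MustCompile(`[IWE]\d{4} (\d{2}:\d{2}:\d{2}\.\d+)`)
	started = regexp.MustCompile(`MountVolume started for volume \\?"(\S+?)\\?"`)
	done    = regexp.MustCompile(`MountVolume\.SetUp succeeded for volume \\?"(\S+?)\\?"`)
)

func main() {
	begin := map[string]time.Time{} // volume name -> "started" timestamp
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		line := sc.Text()
		m := ts.FindStringSubmatch(line)
		if m == nil {
			continue
		}
		t, _ := time.Parse("15:04:05.999999", m[1])
		if s := started.FindStringSubmatch(line); s != nil {
			begin[s[1]] = t
		} else if d := done.FindStringSubmatch(line); d != nil {
			if b, ok := begin[d[1]]; ok {
				fmt.Printf("%-25s %v\n", d[1], t.Sub(b))
			}
		}
	}
}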
event={"ID":"ed6932a0-b19b-405a-9266-f19d3c39ecae","Type":"ContainerStarted","Data":"156a0544d4e95ef8c9526bba2a2a55f062a6ddb64279f5ef1228da1302bc5f4b"} Dec 05 05:46:45 crc kubenswrapper[4652]: I1205 05:46:45.855160 4652 generic.go:334] "Generic (PLEG): container finished" podID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" containerID="65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9" exitCode=0 Dec 05 05:46:45 crc kubenswrapper[4652]: I1205 05:46:45.855237 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" event={"ID":"c2df2c65-9ffc-41ff-abcf-14b13aee5e97","Type":"ContainerDied","Data":"65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9"} Dec 05 05:46:45 crc kubenswrapper[4652]: I1205 05:46:45.855260 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" event={"ID":"c2df2c65-9ffc-41ff-abcf-14b13aee5e97","Type":"ContainerStarted","Data":"238025f236b9bb0421cbaac6910a1a042faac4f05204000bc3b57847d69b4b72"} Dec 05 05:46:45 crc kubenswrapper[4652]: I1205 05:46:45.874278 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.874265847 podStartE2EDuration="2.874265847s" podCreationTimestamp="2025-12-05 05:46:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:45.871817686 +0000 UTC m=+1208.108547953" watchObservedRunningTime="2025-12-05 05:46:45.874265847 +0000 UTC m=+1208.110996113" Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.286619 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.350744 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.863920 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" event={"ID":"c2df2c65-9ffc-41ff-abcf-14b13aee5e97","Type":"ContainerStarted","Data":"3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9"} Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.864163 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="sg-core" containerID="cri-o://1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656" gracePeriod=30 Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.864165 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="ceilometer-notification-agent" containerID="cri-o://5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5" gracePeriod=30 Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.864131 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="ceilometer-central-agent" containerID="cri-o://fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007" gracePeriod=30 Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.864176 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="proxy-httpd" 
containerID="cri-o://70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d" gracePeriod=30 Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.864971 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-log" containerID="cri-o://19e95d3ae5c42abc47efe4f1eb7af36ca61230f125c6db1d35e269beea7a8759" gracePeriod=30 Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.864989 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.865035 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-api" containerID="cri-o://60c3574247f08834e4d120fb016a4f89aaabf9e719ce8faed3530537b0d72de7" gracePeriod=30 Dec 05 05:46:46 crc kubenswrapper[4652]: I1205 05:46:46.884821 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" podStartSLOduration=3.884808314 podStartE2EDuration="3.884808314s" podCreationTimestamp="2025-12-05 05:46:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:46.879908145 +0000 UTC m=+1209.116638412" watchObservedRunningTime="2025-12-05 05:46:46.884808314 +0000 UTC m=+1209.121538581" Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.875214 4652 generic.go:334] "Generic (PLEG): container finished" podID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerID="70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d" exitCode=0 Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.875655 4652 generic.go:334] "Generic (PLEG): container finished" podID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerID="1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656" exitCode=2 Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.875668 4652 generic.go:334] "Generic (PLEG): container finished" podID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerID="5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5" exitCode=0 Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.875718 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerDied","Data":"70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d"} Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.875743 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerDied","Data":"1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656"} Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.875754 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerDied","Data":"5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5"} Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.878194 4652 generic.go:334] "Generic (PLEG): container finished" podID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerID="60c3574247f08834e4d120fb016a4f89aaabf9e719ce8faed3530537b0d72de7" exitCode=0 Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.878215 4652 generic.go:334] "Generic (PLEG): container 
finished" podID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerID="19e95d3ae5c42abc47efe4f1eb7af36ca61230f125c6db1d35e269beea7a8759" exitCode=143 Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.878658 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1266ba95-93bf-4fd0-8b3d-98309d61e84a","Type":"ContainerDied","Data":"60c3574247f08834e4d120fb016a4f89aaabf9e719ce8faed3530537b0d72de7"} Dec 05 05:46:47 crc kubenswrapper[4652]: I1205 05:46:47.878705 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1266ba95-93bf-4fd0-8b3d-98309d61e84a","Type":"ContainerDied","Data":"19e95d3ae5c42abc47efe4f1eb7af36ca61230f125c6db1d35e269beea7a8759"} Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.106721 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.241855 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1266ba95-93bf-4fd0-8b3d-98309d61e84a-logs\") pod \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.242004 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-config-data\") pod \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.242167 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p79ns\" (UniqueName: \"kubernetes.io/projected/1266ba95-93bf-4fd0-8b3d-98309d61e84a-kube-api-access-p79ns\") pod \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.242269 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-combined-ca-bundle\") pod \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\" (UID: \"1266ba95-93bf-4fd0-8b3d-98309d61e84a\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.242959 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1266ba95-93bf-4fd0-8b3d-98309d61e84a-logs" (OuterVolumeSpecName: "logs") pod "1266ba95-93bf-4fd0-8b3d-98309d61e84a" (UID: "1266ba95-93bf-4fd0-8b3d-98309d61e84a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.262542 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1266ba95-93bf-4fd0-8b3d-98309d61e84a-kube-api-access-p79ns" (OuterVolumeSpecName: "kube-api-access-p79ns") pod "1266ba95-93bf-4fd0-8b3d-98309d61e84a" (UID: "1266ba95-93bf-4fd0-8b3d-98309d61e84a"). InnerVolumeSpecName "kube-api-access-p79ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.306021 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1266ba95-93bf-4fd0-8b3d-98309d61e84a" (UID: "1266ba95-93bf-4fd0-8b3d-98309d61e84a"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.307661 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-config-data" (OuterVolumeSpecName: "config-data") pod "1266ba95-93bf-4fd0-8b3d-98309d61e84a" (UID: "1266ba95-93bf-4fd0-8b3d-98309d61e84a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.351274 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p79ns\" (UniqueName: \"kubernetes.io/projected/1266ba95-93bf-4fd0-8b3d-98309d61e84a-kube-api-access-p79ns\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.351317 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.351330 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1266ba95-93bf-4fd0-8b3d-98309d61e84a-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.351340 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1266ba95-93bf-4fd0-8b3d-98309d61e84a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.521292 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.663021 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-run-httpd\") pod \"b48f991b-8da3-428d-bf21-90ca1ad92772\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.663344 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ljqm\" (UniqueName: \"kubernetes.io/projected/b48f991b-8da3-428d-bf21-90ca1ad92772-kube-api-access-4ljqm\") pod \"b48f991b-8da3-428d-bf21-90ca1ad92772\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.663439 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-scripts\") pod \"b48f991b-8da3-428d-bf21-90ca1ad92772\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.663495 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-log-httpd\") pod \"b48f991b-8da3-428d-bf21-90ca1ad92772\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.663607 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-sg-core-conf-yaml\") pod \"b48f991b-8da3-428d-bf21-90ca1ad92772\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 
05:46:48.663645 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-ceilometer-tls-certs\") pod \"b48f991b-8da3-428d-bf21-90ca1ad92772\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.663703 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-combined-ca-bundle\") pod \"b48f991b-8da3-428d-bf21-90ca1ad92772\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.663747 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-config-data\") pod \"b48f991b-8da3-428d-bf21-90ca1ad92772\" (UID: \"b48f991b-8da3-428d-bf21-90ca1ad92772\") " Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.663876 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b48f991b-8da3-428d-bf21-90ca1ad92772" (UID: "b48f991b-8da3-428d-bf21-90ca1ad92772"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.664093 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b48f991b-8da3-428d-bf21-90ca1ad92772" (UID: "b48f991b-8da3-428d-bf21-90ca1ad92772"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.665054 4652 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.665073 4652 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b48f991b-8da3-428d-bf21-90ca1ad92772-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.668238 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b48f991b-8da3-428d-bf21-90ca1ad92772-kube-api-access-4ljqm" (OuterVolumeSpecName: "kube-api-access-4ljqm") pod "b48f991b-8da3-428d-bf21-90ca1ad92772" (UID: "b48f991b-8da3-428d-bf21-90ca1ad92772"). InnerVolumeSpecName "kube-api-access-4ljqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.668642 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-scripts" (OuterVolumeSpecName: "scripts") pod "b48f991b-8da3-428d-bf21-90ca1ad92772" (UID: "b48f991b-8da3-428d-bf21-90ca1ad92772"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.687644 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b48f991b-8da3-428d-bf21-90ca1ad92772" (UID: "b48f991b-8da3-428d-bf21-90ca1ad92772"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.708778 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b48f991b-8da3-428d-bf21-90ca1ad92772" (UID: "b48f991b-8da3-428d-bf21-90ca1ad92772"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.725017 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b48f991b-8da3-428d-bf21-90ca1ad92772" (UID: "b48f991b-8da3-428d-bf21-90ca1ad92772"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.741473 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-config-data" (OuterVolumeSpecName: "config-data") pod "b48f991b-8da3-428d-bf21-90ca1ad92772" (UID: "b48f991b-8da3-428d-bf21-90ca1ad92772"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.766500 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.766526 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ljqm\" (UniqueName: \"kubernetes.io/projected/b48f991b-8da3-428d-bf21-90ca1ad92772-kube-api-access-4ljqm\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.766539 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.766547 4652 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.766569 4652 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.766577 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b48f991b-8da3-428d-bf21-90ca1ad92772-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.887836 4652 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.888286 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1266ba95-93bf-4fd0-8b3d-98309d61e84a","Type":"ContainerDied","Data":"27324218c0e7cb2790da2b091e7def46c28aa8b5651e2c4c056a99452064fe1e"} Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.888331 4652 scope.go:117] "RemoveContainer" containerID="60c3574247f08834e4d120fb016a4f89aaabf9e719ce8faed3530537b0d72de7" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.892079 4652 generic.go:334] "Generic (PLEG): container finished" podID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerID="fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007" exitCode=0 Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.893240 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.895784 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerDied","Data":"fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007"} Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.895817 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b48f991b-8da3-428d-bf21-90ca1ad92772","Type":"ContainerDied","Data":"98d682c2f966fe44c93d9b712607c15a1cc52d93e31771a9d6db63c7121afeed"} Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.915123 4652 scope.go:117] "RemoveContainer" containerID="19e95d3ae5c42abc47efe4f1eb7af36ca61230f125c6db1d35e269beea7a8759" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.915899 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.938601 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.943765 4652 scope.go:117] "RemoveContainer" containerID="70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.952589 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.962358 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.968996 4652 scope.go:117] "RemoveContainer" containerID="1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.971614 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:48 crc kubenswrapper[4652]: E1205 05:46:48.972058 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="proxy-httpd" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972079 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="proxy-httpd" Dec 05 05:46:48 crc kubenswrapper[4652]: E1205 05:46:48.972094 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="ceilometer-notification-agent" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972100 4652 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="ceilometer-notification-agent" Dec 05 05:46:48 crc kubenswrapper[4652]: E1205 05:46:48.972114 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="ceilometer-central-agent" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972120 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="ceilometer-central-agent" Dec 05 05:46:48 crc kubenswrapper[4652]: E1205 05:46:48.972136 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-api" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972142 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-api" Dec 05 05:46:48 crc kubenswrapper[4652]: E1205 05:46:48.972157 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-log" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972163 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-log" Dec 05 05:46:48 crc kubenswrapper[4652]: E1205 05:46:48.972181 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="sg-core" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972186 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="sg-core" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972362 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="proxy-httpd" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972388 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-api" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972401 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="sg-core" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972417 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="ceilometer-notification-agent" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972438 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" containerName="nova-api-log" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.972450 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" containerName="ceilometer-central-agent" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.973468 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.975624 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.976034 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.976062 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.978487 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.983980 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.984010 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.984950 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.985677 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.985825 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.992681 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:48 crc kubenswrapper[4652]: I1205 05:46:48.994213 4652 scope.go:117] "RemoveContainer" containerID="5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.013878 4652 scope.go:117] "RemoveContainer" containerID="fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072264 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/714dfe3f-61b7-4a86-88bf-3eaf640a4437-run-httpd\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072388 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q9cv\" (UniqueName: \"kubernetes.io/projected/714dfe3f-61b7-4a86-88bf-3eaf640a4437-kube-api-access-7q9cv\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072418 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072480 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 
crc kubenswrapper[4652]: I1205 05:46:49.072504 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4594de24-91d2-408d-81a8-0d6d0b1e6324-logs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072534 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpg8r\" (UniqueName: \"kubernetes.io/projected/4594de24-91d2-408d-81a8-0d6d0b1e6324-kube-api-access-xpg8r\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072576 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-scripts\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072596 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-config-data\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072648 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072669 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-config-data\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072740 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072765 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-public-tls-certs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072836 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/714dfe3f-61b7-4a86-88bf-3eaf640a4437-log-httpd\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.072857 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.076516 4652 scope.go:117] "RemoveContainer" containerID="70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d" Dec 05 05:46:49 crc kubenswrapper[4652]: E1205 05:46:49.076939 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d\": container with ID starting with 70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d not found: ID does not exist" containerID="70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.076970 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d"} err="failed to get container status \"70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d\": rpc error: code = NotFound desc = could not find container \"70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d\": container with ID starting with 70aefda48178d7a0e9b6ab39559ab2024f54edea7dc0f81d7d419b094d92e01d not found: ID does not exist" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.076994 4652 scope.go:117] "RemoveContainer" containerID="1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656" Dec 05 05:46:49 crc kubenswrapper[4652]: E1205 05:46:49.077410 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656\": container with ID starting with 1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656 not found: ID does not exist" containerID="1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.077463 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656"} err="failed to get container status \"1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656\": rpc error: code = NotFound desc = could not find container \"1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656\": container with ID starting with 1ad60ac861753d5abcc42d7fda2ba680a1f8e09588523bc632e30069d01ec656 not found: ID does not exist" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.077508 4652 scope.go:117] "RemoveContainer" containerID="5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5" Dec 05 05:46:49 crc kubenswrapper[4652]: E1205 05:46:49.077869 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5\": container with ID starting with 5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5 not found: ID does not exist" containerID="5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.077890 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5"} err="failed to get 
container status \"5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5\": rpc error: code = NotFound desc = could not find container \"5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5\": container with ID starting with 5975907bd5a355c3ac66a85cfdcdfc78fa5af2aecee8fdd91cddbe844c3589b5 not found: ID does not exist" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.077903 4652 scope.go:117] "RemoveContainer" containerID="fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007" Dec 05 05:46:49 crc kubenswrapper[4652]: E1205 05:46:49.078119 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007\": container with ID starting with fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007 not found: ID does not exist" containerID="fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.078150 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007"} err="failed to get container status \"fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007\": rpc error: code = NotFound desc = could not find container \"fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007\": container with ID starting with fcee1188e4ed99b57813f03ba4fc17de8eae6fb8e397e1997ea195661dca9007 not found: ID does not exist" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173745 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q9cv\" (UniqueName: \"kubernetes.io/projected/714dfe3f-61b7-4a86-88bf-3eaf640a4437-kube-api-access-7q9cv\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173781 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173822 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173841 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4594de24-91d2-408d-81a8-0d6d0b1e6324-logs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173865 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpg8r\" (UniqueName: \"kubernetes.io/projected/4594de24-91d2-408d-81a8-0d6d0b1e6324-kube-api-access-xpg8r\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173888 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-scripts\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173907 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-config-data\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173940 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173956 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-config-data\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173978 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.173998 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-public-tls-certs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.174022 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/714dfe3f-61b7-4a86-88bf-3eaf640a4437-log-httpd\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.174038 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.174065 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/714dfe3f-61b7-4a86-88bf-3eaf640a4437-run-httpd\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.174259 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4594de24-91d2-408d-81a8-0d6d0b1e6324-logs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.174594 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/714dfe3f-61b7-4a86-88bf-3eaf640a4437-log-httpd\") pod 
\"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.174767 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/714dfe3f-61b7-4a86-88bf-3eaf640a4437-run-httpd\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.177081 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.177104 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.177876 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.178387 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.178716 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-config-data\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.179059 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-public-tls-certs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.179872 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-config-data\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.185294 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.188012 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpg8r\" (UniqueName: \"kubernetes.io/projected/4594de24-91d2-408d-81a8-0d6d0b1e6324-kube-api-access-xpg8r\") pod \"nova-api-0\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " 
pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.198200 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q9cv\" (UniqueName: \"kubernetes.io/projected/714dfe3f-61b7-4a86-88bf-3eaf640a4437-kube-api-access-7q9cv\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.199440 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/714dfe3f-61b7-4a86-88bf-3eaf640a4437-scripts\") pod \"ceilometer-0\" (UID: \"714dfe3f-61b7-4a86-88bf-3eaf640a4437\") " pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.231042 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.298836 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.311970 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.711449 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:46:49 crc kubenswrapper[4652]: W1205 05:46:49.712507 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4594de24_91d2_408d_81a8_0d6d0b1e6324.slice/crio-1fcbed19aed7de005895f7ce11e21e4fc130fc88400312c6d6ad2b9778c621e7 WatchSource:0}: Error finding container 1fcbed19aed7de005895f7ce11e21e4fc130fc88400312c6d6ad2b9778c621e7: Status 404 returned error can't find the container with id 1fcbed19aed7de005895f7ce11e21e4fc130fc88400312c6d6ad2b9778c621e7 Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.781479 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 05:46:49 crc kubenswrapper[4652]: W1205 05:46:49.789550 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod714dfe3f_61b7_4a86_88bf_3eaf640a4437.slice/crio-cb7cfb2570fd6f1a2d9abfca950b327b95026809e7cee701c5e97d39063e5d91 WatchSource:0}: Error finding container cb7cfb2570fd6f1a2d9abfca950b327b95026809e7cee701c5e97d39063e5d91: Status 404 returned error can't find the container with id cb7cfb2570fd6f1a2d9abfca950b327b95026809e7cee701c5e97d39063e5d91 Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.900748 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"714dfe3f-61b7-4a86-88bf-3eaf640a4437","Type":"ContainerStarted","Data":"cb7cfb2570fd6f1a2d9abfca950b327b95026809e7cee701c5e97d39063e5d91"} Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.903200 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4594de24-91d2-408d-81a8-0d6d0b1e6324","Type":"ContainerStarted","Data":"83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228"} Dec 05 05:46:49 crc kubenswrapper[4652]: I1205 05:46:49.903318 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4594de24-91d2-408d-81a8-0d6d0b1e6324","Type":"ContainerStarted","Data":"1fcbed19aed7de005895f7ce11e21e4fc130fc88400312c6d6ad2b9778c621e7"} Dec 05 05:46:50 crc kubenswrapper[4652]: I1205 
05:46:50.133570 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1266ba95-93bf-4fd0-8b3d-98309d61e84a" path="/var/lib/kubelet/pods/1266ba95-93bf-4fd0-8b3d-98309d61e84a/volumes" Dec 05 05:46:50 crc kubenswrapper[4652]: I1205 05:46:50.134379 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b48f991b-8da3-428d-bf21-90ca1ad92772" path="/var/lib/kubelet/pods/b48f991b-8da3-428d-bf21-90ca1ad92772/volumes" Dec 05 05:46:50 crc kubenswrapper[4652]: I1205 05:46:50.913727 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"714dfe3f-61b7-4a86-88bf-3eaf640a4437","Type":"ContainerStarted","Data":"e4b712b373fb981f3f52ff37b1406189cdaed350e48a9c12a89a9dc5336ac074"} Dec 05 05:46:50 crc kubenswrapper[4652]: I1205 05:46:50.915691 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4594de24-91d2-408d-81a8-0d6d0b1e6324","Type":"ContainerStarted","Data":"608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e"} Dec 05 05:46:50 crc kubenswrapper[4652]: I1205 05:46:50.933373 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.933356468 podStartE2EDuration="2.933356468s" podCreationTimestamp="2025-12-05 05:46:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:50.927174611 +0000 UTC m=+1213.163904879" watchObservedRunningTime="2025-12-05 05:46:50.933356468 +0000 UTC m=+1213.170086736" Dec 05 05:46:51 crc kubenswrapper[4652]: I1205 05:46:51.926710 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"714dfe3f-61b7-4a86-88bf-3eaf640a4437","Type":"ContainerStarted","Data":"c5aa5aad0aa07e96796459eca9b452036bc76bf018f8bda86378a67dc37bc945"} Dec 05 05:46:52 crc kubenswrapper[4652]: I1205 05:46:52.936003 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"714dfe3f-61b7-4a86-88bf-3eaf640a4437","Type":"ContainerStarted","Data":"3ca7256f8c84f85dd59d967c4fd66386d817d2e3ec35c98567eafbf885e72d3b"} Dec 05 05:46:53 crc kubenswrapper[4652]: I1205 05:46:53.948024 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"714dfe3f-61b7-4a86-88bf-3eaf640a4437","Type":"ContainerStarted","Data":"e26c57b744c3faa2c3e9bf3df7e62b3244e1cf048edb2953b74801e5f32d3ee8"} Dec 05 05:46:53 crc kubenswrapper[4652]: I1205 05:46:53.948258 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 05:46:53 crc kubenswrapper[4652]: I1205 05:46:53.979657 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.598586308 podStartE2EDuration="5.979618253s" podCreationTimestamp="2025-12-05 05:46:48 +0000 UTC" firstStartedPulling="2025-12-05 05:46:49.791779469 +0000 UTC m=+1212.028509737" lastFinishedPulling="2025-12-05 05:46:53.172811415 +0000 UTC m=+1215.409541682" observedRunningTime="2025-12-05 05:46:53.971770674 +0000 UTC m=+1216.208500942" watchObservedRunningTime="2025-12-05 05:46:53.979618253 +0000 UTC m=+1216.216348520" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.231207 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.247250 4652 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.358582 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.404218 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-r5bbx"] Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.404417 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" podUID="d5e284f4-329e-47be-862a-96dd0d66c5fd" containerName="dnsmasq-dns" containerID="cri-o://cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62" gracePeriod=10 Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.797011 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.882534 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-sb\") pod \"d5e284f4-329e-47be-862a-96dd0d66c5fd\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.882635 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-nb\") pod \"d5e284f4-329e-47be-862a-96dd0d66c5fd\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.882671 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-config\") pod \"d5e284f4-329e-47be-862a-96dd0d66c5fd\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.882746 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spfh7\" (UniqueName: \"kubernetes.io/projected/d5e284f4-329e-47be-862a-96dd0d66c5fd-kube-api-access-spfh7\") pod \"d5e284f4-329e-47be-862a-96dd0d66c5fd\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.882797 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-swift-storage-0\") pod \"d5e284f4-329e-47be-862a-96dd0d66c5fd\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.882818 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-svc\") pod \"d5e284f4-329e-47be-862a-96dd0d66c5fd\" (UID: \"d5e284f4-329e-47be-862a-96dd0d66c5fd\") " Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.887926 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5e284f4-329e-47be-862a-96dd0d66c5fd-kube-api-access-spfh7" (OuterVolumeSpecName: "kube-api-access-spfh7") pod "d5e284f4-329e-47be-862a-96dd0d66c5fd" (UID: "d5e284f4-329e-47be-862a-96dd0d66c5fd"). InnerVolumeSpecName "kube-api-access-spfh7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.921004 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-config" (OuterVolumeSpecName: "config") pod "d5e284f4-329e-47be-862a-96dd0d66c5fd" (UID: "d5e284f4-329e-47be-862a-96dd0d66c5fd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.921688 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d5e284f4-329e-47be-862a-96dd0d66c5fd" (UID: "d5e284f4-329e-47be-862a-96dd0d66c5fd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.924861 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d5e284f4-329e-47be-862a-96dd0d66c5fd" (UID: "d5e284f4-329e-47be-862a-96dd0d66c5fd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.926896 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d5e284f4-329e-47be-862a-96dd0d66c5fd" (UID: "d5e284f4-329e-47be-862a-96dd0d66c5fd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.929044 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d5e284f4-329e-47be-862a-96dd0d66c5fd" (UID: "d5e284f4-329e-47be-862a-96dd0d66c5fd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.959950 4652 generic.go:334] "Generic (PLEG): container finished" podID="d5e284f4-329e-47be-862a-96dd0d66c5fd" containerID="cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62" exitCode=0 Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.960005 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.960033 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" event={"ID":"d5e284f4-329e-47be-862a-96dd0d66c5fd","Type":"ContainerDied","Data":"cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62"} Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.960093 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-844fc57f6f-r5bbx" event={"ID":"d5e284f4-329e-47be-862a-96dd0d66c5fd","Type":"ContainerDied","Data":"a2a1350d4eddd8ec53528335912191c2d1682d5266f631512fbc75b6639fadc6"} Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.960112 4652 scope.go:117] "RemoveContainer" containerID="cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.974074 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.984872 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spfh7\" (UniqueName: \"kubernetes.io/projected/d5e284f4-329e-47be-862a-96dd0d66c5fd-kube-api-access-spfh7\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.984893 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.984902 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.984911 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.984919 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.984928 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5e284f4-329e-47be-862a-96dd0d66c5fd-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:46:54 crc kubenswrapper[4652]: I1205 05:46:54.986818 4652 scope.go:117] "RemoveContainer" containerID="9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.021083 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-r5bbx"] Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.028440 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-844fc57f6f-r5bbx"] Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.034693 4652 scope.go:117] "RemoveContainer" containerID="cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62" Dec 05 05:46:55 crc kubenswrapper[4652]: E1205 05:46:55.035465 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62\": container with ID starting with cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62 not found: ID does not exist" containerID="cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.035503 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62"} err="failed to get container status \"cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62\": rpc error: code = NotFound desc = could not find container \"cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62\": container with ID starting with cdaf526d3987c016666b21a3517d72a5154abccb90b17db7339c7983a97d4e62 not found: ID does not exist" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.035571 4652 scope.go:117] "RemoveContainer" containerID="9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f" Dec 05 05:46:55 crc kubenswrapper[4652]: E1205 05:46:55.035851 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f\": container with ID starting with 9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f not found: ID does not exist" containerID="9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.035867 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f"} err="failed to get container status \"9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f\": rpc error: code = NotFound desc = could not find container \"9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f\": container with ID starting with 9d813dd136c33224d027f88f0bbada3b796e87f77e24fbff984564dc31957c8f not found: ID does not exist" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.189502 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-zn44j"] Dec 05 05:46:55 crc kubenswrapper[4652]: E1205 05:46:55.190132 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5e284f4-329e-47be-862a-96dd0d66c5fd" containerName="dnsmasq-dns" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.190151 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5e284f4-329e-47be-862a-96dd0d66c5fd" containerName="dnsmasq-dns" Dec 05 05:46:55 crc kubenswrapper[4652]: E1205 05:46:55.190191 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5e284f4-329e-47be-862a-96dd0d66c5fd" containerName="init" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.190198 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5e284f4-329e-47be-862a-96dd0d66c5fd" containerName="init" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.190384 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5e284f4-329e-47be-862a-96dd0d66c5fd" containerName="dnsmasq-dns" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.191122 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: W1205 05:46:55.192719 4652 reflector.go:561] object-"openstack"/"nova-cell1-manage-config-data": failed to list *v1.Secret: secrets "nova-cell1-manage-config-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 05 05:46:55 crc kubenswrapper[4652]: W1205 05:46:55.192734 4652 reflector.go:561] object-"openstack"/"nova-cell1-manage-scripts": failed to list *v1.Secret: secrets "nova-cell1-manage-scripts" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 05 05:46:55 crc kubenswrapper[4652]: E1205 05:46:55.192755 4652 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"nova-cell1-manage-config-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"nova-cell1-manage-config-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 05:46:55 crc kubenswrapper[4652]: E1205 05:46:55.192767 4652 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"nova-cell1-manage-scripts\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"nova-cell1-manage-scripts\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.205605 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zn44j"] Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.290495 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.290536 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnxkr\" (UniqueName: \"kubernetes.io/projected/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-kube-api-access-wnxkr\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.290799 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.291018 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-scripts\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 
crc kubenswrapper[4652]: I1205 05:46:55.392659 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.392746 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-scripts\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.392816 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.392838 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnxkr\" (UniqueName: \"kubernetes.io/projected/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-kube-api-access-wnxkr\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.396812 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:55 crc kubenswrapper[4652]: I1205 05:46:55.406056 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnxkr\" (UniqueName: \"kubernetes.io/projected/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-kube-api-access-wnxkr\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:56 crc kubenswrapper[4652]: I1205 05:46:56.123232 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 05:46:56 crc kubenswrapper[4652]: I1205 05:46:56.134236 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5e284f4-329e-47be-862a-96dd0d66c5fd" path="/var/lib/kubelet/pods/d5e284f4-329e-47be-862a-96dd0d66c5fd/volumes" Dec 05 05:46:56 crc kubenswrapper[4652]: I1205 05:46:56.137899 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-scripts\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:56 crc kubenswrapper[4652]: E1205 05:46:56.392960 4652 secret.go:188] Couldn't get secret openstack/nova-cell1-manage-config-data: failed to sync secret cache: timed out waiting for the condition Dec 05 05:46:56 crc kubenswrapper[4652]: E1205 05:46:56.393221 4652 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data 
podName:b873dbf3-75d1-4cdf-b213-f17952cd0bc8 nodeName:}" failed. No retries permitted until 2025-12-05 05:46:56.893204933 +0000 UTC m=+1219.129935200 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data") pod "nova-cell1-cell-mapping-zn44j" (UID: "b873dbf3-75d1-4cdf-b213-f17952cd0bc8") : failed to sync secret cache: timed out waiting for the condition Dec 05 05:46:56 crc kubenswrapper[4652]: I1205 05:46:56.741751 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 05:46:56 crc kubenswrapper[4652]: I1205 05:46:56.922830 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:56 crc kubenswrapper[4652]: I1205 05:46:56.926794 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data\") pod \"nova-cell1-cell-mapping-zn44j\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:57 crc kubenswrapper[4652]: I1205 05:46:57.005549 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:46:57 crc kubenswrapper[4652]: I1205 05:46:57.382745 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zn44j"] Dec 05 05:46:57 crc kubenswrapper[4652]: W1205 05:46:57.383762 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb873dbf3_75d1_4cdf_b213_f17952cd0bc8.slice/crio-9479bec306d9b9e62563f6cd043faa0954028308e27f8a3e1a5a19e7842db461 WatchSource:0}: Error finding container 9479bec306d9b9e62563f6cd043faa0954028308e27f8a3e1a5a19e7842db461: Status 404 returned error can't find the container with id 9479bec306d9b9e62563f6cd043faa0954028308e27f8a3e1a5a19e7842db461 Dec 05 05:46:57 crc kubenswrapper[4652]: I1205 05:46:57.992928 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zn44j" event={"ID":"b873dbf3-75d1-4cdf-b213-f17952cd0bc8","Type":"ContainerStarted","Data":"a8b4c104548f841f5a694907ef0d52bd2d273200f23763ea80c48d4192abe0b1"} Dec 05 05:46:57 crc kubenswrapper[4652]: I1205 05:46:57.993138 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zn44j" event={"ID":"b873dbf3-75d1-4cdf-b213-f17952cd0bc8","Type":"ContainerStarted","Data":"9479bec306d9b9e62563f6cd043faa0954028308e27f8a3e1a5a19e7842db461"} Dec 05 05:46:58 crc kubenswrapper[4652]: I1205 05:46:58.008580 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-zn44j" podStartSLOduration=3.008568227 podStartE2EDuration="3.008568227s" podCreationTimestamp="2025-12-05 05:46:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:46:58.004538003 +0000 UTC m=+1220.241268260" watchObservedRunningTime="2025-12-05 05:46:58.008568227 +0000 UTC m=+1220.245298494" Dec 05 05:46:59 crc kubenswrapper[4652]: I1205 
05:46:59.299713 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 05:46:59 crc kubenswrapper[4652]: I1205 05:46:59.299968 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 05:47:00 crc kubenswrapper[4652]: I1205 05:47:00.313671 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.218:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 05:47:00 crc kubenswrapper[4652]: I1205 05:47:00.313693 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.218:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 05:47:02 crc kubenswrapper[4652]: I1205 05:47:02.024114 4652 generic.go:334] "Generic (PLEG): container finished" podID="b873dbf3-75d1-4cdf-b213-f17952cd0bc8" containerID="a8b4c104548f841f5a694907ef0d52bd2d273200f23763ea80c48d4192abe0b1" exitCode=0 Dec 05 05:47:02 crc kubenswrapper[4652]: I1205 05:47:02.024202 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zn44j" event={"ID":"b873dbf3-75d1-4cdf-b213-f17952cd0bc8","Type":"ContainerDied","Data":"a8b4c104548f841f5a694907ef0d52bd2d273200f23763ea80c48d4192abe0b1"} Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.310919 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.334458 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-scripts\") pod \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.334498 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-combined-ca-bundle\") pod \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.334519 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data\") pod \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.334571 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnxkr\" (UniqueName: \"kubernetes.io/projected/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-kube-api-access-wnxkr\") pod \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\" (UID: \"b873dbf3-75d1-4cdf-b213-f17952cd0bc8\") " Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.344224 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-scripts" (OuterVolumeSpecName: "scripts") pod "b873dbf3-75d1-4cdf-b213-f17952cd0bc8" (UID: "b873dbf3-75d1-4cdf-b213-f17952cd0bc8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.345498 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-kube-api-access-wnxkr" (OuterVolumeSpecName: "kube-api-access-wnxkr") pod "b873dbf3-75d1-4cdf-b213-f17952cd0bc8" (UID: "b873dbf3-75d1-4cdf-b213-f17952cd0bc8"). InnerVolumeSpecName "kube-api-access-wnxkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.360397 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data" (OuterVolumeSpecName: "config-data") pod "b873dbf3-75d1-4cdf-b213-f17952cd0bc8" (UID: "b873dbf3-75d1-4cdf-b213-f17952cd0bc8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.362241 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b873dbf3-75d1-4cdf-b213-f17952cd0bc8" (UID: "b873dbf3-75d1-4cdf-b213-f17952cd0bc8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.436255 4652 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.436279 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.436289 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:03 crc kubenswrapper[4652]: I1205 05:47:03.436297 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnxkr\" (UniqueName: \"kubernetes.io/projected/b873dbf3-75d1-4cdf-b213-f17952cd0bc8-kube-api-access-wnxkr\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.040836 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zn44j" event={"ID":"b873dbf3-75d1-4cdf-b213-f17952cd0bc8","Type":"ContainerDied","Data":"9479bec306d9b9e62563f6cd043faa0954028308e27f8a3e1a5a19e7842db461"} Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.041037 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9479bec306d9b9e62563f6cd043faa0954028308e27f8a3e1a5a19e7842db461" Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.040897 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zn44j" Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.149987 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.150040 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.195363 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.195600 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-log" containerID="cri-o://83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228" gracePeriod=30 Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.195764 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-api" containerID="cri-o://608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e" gracePeriod=30 Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.208391 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.208621 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="51fc5aa7-5723-4983-8e88-6b96fa157fff" containerName="nova-scheduler-scheduler" containerID="cri-o://14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025" gracePeriod=30 Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.239247 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.239465 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-log" containerID="cri-o://423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06" gracePeriod=30 Dec 05 05:47:04 crc kubenswrapper[4652]: I1205 05:47:04.239586 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-metadata" containerID="cri-o://0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8" gracePeriod=30 Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.044816 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.055980 4652 generic.go:334] "Generic (PLEG): container finished" podID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerID="83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228" exitCode=143 Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.056052 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4594de24-91d2-408d-81a8-0d6d0b1e6324","Type":"ContainerDied","Data":"83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228"} Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.058703 4652 generic.go:334] "Generic (PLEG): container finished" podID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerID="0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8" exitCode=0 Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.058733 4652 generic.go:334] "Generic (PLEG): container finished" podID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerID="423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06" exitCode=143 Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.058764 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5583554b-9fea-4104-a8a7-91e2151b3f45","Type":"ContainerDied","Data":"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8"} Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.058945 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5583554b-9fea-4104-a8a7-91e2151b3f45","Type":"ContainerDied","Data":"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06"} Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.058968 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5583554b-9fea-4104-a8a7-91e2151b3f45","Type":"ContainerDied","Data":"1b87127603dcdac8ae6a2b4f901237ad015323e24ffb81abbd21782b8d7e96f4"} Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.058987 4652 scope.go:117] "RemoveContainer" containerID="0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.059305 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.061274 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtk87\" (UniqueName: \"kubernetes.io/projected/5583554b-9fea-4104-a8a7-91e2151b3f45-kube-api-access-rtk87\") pod \"5583554b-9fea-4104-a8a7-91e2151b3f45\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.061396 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5583554b-9fea-4104-a8a7-91e2151b3f45-logs\") pod \"5583554b-9fea-4104-a8a7-91e2151b3f45\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.061447 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-combined-ca-bundle\") pod \"5583554b-9fea-4104-a8a7-91e2151b3f45\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.061473 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-nova-metadata-tls-certs\") pod \"5583554b-9fea-4104-a8a7-91e2151b3f45\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.061633 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-config-data\") pod \"5583554b-9fea-4104-a8a7-91e2151b3f45\" (UID: \"5583554b-9fea-4104-a8a7-91e2151b3f45\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.064780 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5583554b-9fea-4104-a8a7-91e2151b3f45-logs" (OuterVolumeSpecName: "logs") pod "5583554b-9fea-4104-a8a7-91e2151b3f45" (UID: "5583554b-9fea-4104-a8a7-91e2151b3f45"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.066904 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5583554b-9fea-4104-a8a7-91e2151b3f45-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.086709 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5583554b-9fea-4104-a8a7-91e2151b3f45-kube-api-access-rtk87" (OuterVolumeSpecName: "kube-api-access-rtk87") pod "5583554b-9fea-4104-a8a7-91e2151b3f45" (UID: "5583554b-9fea-4104-a8a7-91e2151b3f45"). InnerVolumeSpecName "kube-api-access-rtk87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.094027 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-config-data" (OuterVolumeSpecName: "config-data") pod "5583554b-9fea-4104-a8a7-91e2151b3f45" (UID: "5583554b-9fea-4104-a8a7-91e2151b3f45"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.105881 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5583554b-9fea-4104-a8a7-91e2151b3f45" (UID: "5583554b-9fea-4104-a8a7-91e2151b3f45"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.111623 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5583554b-9fea-4104-a8a7-91e2151b3f45" (UID: "5583554b-9fea-4104-a8a7-91e2151b3f45"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.145660 4652 scope.go:117] "RemoveContainer" containerID="423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.162033 4652 scope.go:117] "RemoveContainer" containerID="0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8" Dec 05 05:47:05 crc kubenswrapper[4652]: E1205 05:47:05.162396 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8\": container with ID starting with 0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8 not found: ID does not exist" containerID="0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.162433 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8"} err="failed to get container status \"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8\": rpc error: code = NotFound desc = could not find container \"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8\": container with ID starting with 0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8 not found: ID does not exist" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.162453 4652 scope.go:117] "RemoveContainer" containerID="423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06" Dec 05 05:47:05 crc kubenswrapper[4652]: E1205 05:47:05.162791 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06\": container with ID starting with 423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06 not found: ID does not exist" containerID="423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.162819 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06"} err="failed to get container status \"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06\": rpc error: code = NotFound desc = could not find container \"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06\": container with ID starting with 
423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06 not found: ID does not exist" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.162835 4652 scope.go:117] "RemoveContainer" containerID="0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.163011 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8"} err="failed to get container status \"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8\": rpc error: code = NotFound desc = could not find container \"0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8\": container with ID starting with 0c21b0ae7384b7b64614cf1c5bfe54497af8806e87386b84557e189bbcdd12c8 not found: ID does not exist" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.163027 4652 scope.go:117] "RemoveContainer" containerID="423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.163181 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06"} err="failed to get container status \"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06\": rpc error: code = NotFound desc = could not find container \"423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06\": container with ID starting with 423d94cc69968644ebf372df3a79099a6f6baa33c7375291a8d2fdf99ccd6e06 not found: ID does not exist" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.168836 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.169234 4652 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.169303 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5583554b-9fea-4104-a8a7-91e2151b3f45-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.169356 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtk87\" (UniqueName: \"kubernetes.io/projected/5583554b-9fea-4104-a8a7-91e2151b3f45-kube-api-access-rtk87\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.388637 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.404402 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.417927 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.426628 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:47:05 crc kubenswrapper[4652]: E1205 05:47:05.427833 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-log" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.427852 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-log" Dec 05 05:47:05 crc kubenswrapper[4652]: E1205 05:47:05.427863 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b873dbf3-75d1-4cdf-b213-f17952cd0bc8" containerName="nova-manage" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.427870 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b873dbf3-75d1-4cdf-b213-f17952cd0bc8" containerName="nova-manage" Dec 05 05:47:05 crc kubenswrapper[4652]: E1205 05:47:05.427883 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-log" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.427889 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-log" Dec 05 05:47:05 crc kubenswrapper[4652]: E1205 05:47:05.427900 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-metadata" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.427905 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-metadata" Dec 05 05:47:05 crc kubenswrapper[4652]: E1205 05:47:05.427929 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-api" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.427935 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-api" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.428086 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-metadata" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.428097 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-log" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.428107 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" containerName="nova-metadata-log" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.428118 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b873dbf3-75d1-4cdf-b213-f17952cd0bc8" containerName="nova-manage" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.428131 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerName="nova-api-api" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 
05:47:05.429147 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.432527 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.432739 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.441906 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.474828 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-config-data\") pod \"4594de24-91d2-408d-81a8-0d6d0b1e6324\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475017 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4594de24-91d2-408d-81a8-0d6d0b1e6324-logs\") pod \"4594de24-91d2-408d-81a8-0d6d0b1e6324\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475046 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-internal-tls-certs\") pod \"4594de24-91d2-408d-81a8-0d6d0b1e6324\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475079 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-public-tls-certs\") pod \"4594de24-91d2-408d-81a8-0d6d0b1e6324\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475162 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpg8r\" (UniqueName: \"kubernetes.io/projected/4594de24-91d2-408d-81a8-0d6d0b1e6324-kube-api-access-xpg8r\") pod \"4594de24-91d2-408d-81a8-0d6d0b1e6324\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475199 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-combined-ca-bundle\") pod \"4594de24-91d2-408d-81a8-0d6d0b1e6324\" (UID: \"4594de24-91d2-408d-81a8-0d6d0b1e6324\") " Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475467 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4594de24-91d2-408d-81a8-0d6d0b1e6324-logs" (OuterVolumeSpecName: "logs") pod "4594de24-91d2-408d-81a8-0d6d0b1e6324" (UID: "4594de24-91d2-408d-81a8-0d6d0b1e6324"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475564 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475603 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-config-data\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475622 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s64mv\" (UniqueName: \"kubernetes.io/projected/d5ed1fac-21dc-4868-aea0-114c5430b87a-kube-api-access-s64mv\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.475825 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.476044 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5ed1fac-21dc-4868-aea0-114c5430b87a-logs\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.476289 4652 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4594de24-91d2-408d-81a8-0d6d0b1e6324-logs\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.481268 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4594de24-91d2-408d-81a8-0d6d0b1e6324-kube-api-access-xpg8r" (OuterVolumeSpecName: "kube-api-access-xpg8r") pod "4594de24-91d2-408d-81a8-0d6d0b1e6324" (UID: "4594de24-91d2-408d-81a8-0d6d0b1e6324"). InnerVolumeSpecName "kube-api-access-xpg8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.496859 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-config-data" (OuterVolumeSpecName: "config-data") pod "4594de24-91d2-408d-81a8-0d6d0b1e6324" (UID: "4594de24-91d2-408d-81a8-0d6d0b1e6324"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.497577 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4594de24-91d2-408d-81a8-0d6d0b1e6324" (UID: "4594de24-91d2-408d-81a8-0d6d0b1e6324"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.512873 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4594de24-91d2-408d-81a8-0d6d0b1e6324" (UID: "4594de24-91d2-408d-81a8-0d6d0b1e6324"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.514005 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4594de24-91d2-408d-81a8-0d6d0b1e6324" (UID: "4594de24-91d2-408d-81a8-0d6d0b1e6324"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.577279 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.577387 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-config-data\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.577493 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s64mv\" (UniqueName: \"kubernetes.io/projected/d5ed1fac-21dc-4868-aea0-114c5430b87a-kube-api-access-s64mv\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.577669 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.578298 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5ed1fac-21dc-4868-aea0-114c5430b87a-logs\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.578479 4652 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.578542 4652 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.578602 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5ed1fac-21dc-4868-aea0-114c5430b87a-logs\") pod 
\"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.578612 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpg8r\" (UniqueName: \"kubernetes.io/projected/4594de24-91d2-408d-81a8-0d6d0b1e6324-kube-api-access-xpg8r\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.578666 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.578680 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4594de24-91d2-408d-81a8-0d6d0b1e6324-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.580033 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-config-data\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.580082 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.580659 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5ed1fac-21dc-4868-aea0-114c5430b87a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.590973 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s64mv\" (UniqueName: \"kubernetes.io/projected/d5ed1fac-21dc-4868-aea0-114c5430b87a-kube-api-access-s64mv\") pod \"nova-metadata-0\" (UID: \"d5ed1fac-21dc-4868-aea0-114c5430b87a\") " pod="openstack/nova-metadata-0" Dec 05 05:47:05 crc kubenswrapper[4652]: I1205 05:47:05.759287 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.068595 4652 generic.go:334] "Generic (PLEG): container finished" podID="4594de24-91d2-408d-81a8-0d6d0b1e6324" containerID="608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e" exitCode=0 Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.068638 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4594de24-91d2-408d-81a8-0d6d0b1e6324","Type":"ContainerDied","Data":"608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e"} Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.068800 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4594de24-91d2-408d-81a8-0d6d0b1e6324","Type":"ContainerDied","Data":"1fcbed19aed7de005895f7ce11e21e4fc130fc88400312c6d6ad2b9778c621e7"} Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.068820 4652 scope.go:117] "RemoveContainer" containerID="608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.068662 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.089476 4652 scope.go:117] "RemoveContainer" containerID="83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.098931 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.105328 4652 scope.go:117] "RemoveContainer" containerID="608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e" Dec 05 05:47:06 crc kubenswrapper[4652]: E1205 05:47:06.105667 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e\": container with ID starting with 608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e not found: ID does not exist" containerID="608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.105696 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e"} err="failed to get container status \"608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e\": rpc error: code = NotFound desc = could not find container \"608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e\": container with ID starting with 608743e35b42c420b4cd740253e50a28f33b59b8ca11c8685e0e73bdcc35555e not found: ID does not exist" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.105716 4652 scope.go:117] "RemoveContainer" containerID="83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228" Dec 05 05:47:06 crc kubenswrapper[4652]: E1205 05:47:06.106871 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228\": container with ID starting with 83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228 not found: ID does not exist" containerID="83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.106908 4652 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228"} err="failed to get container status \"83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228\": rpc error: code = NotFound desc = could not find container \"83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228\": container with ID starting with 83e69620bd5dc0cbf09d6c61475fec1a3f91e787e98fdb007ec5490dc8a4d228 not found: ID does not exist" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.107492 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.118830 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.121980 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.126900 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.127332 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.130118 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.137642 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4594de24-91d2-408d-81a8-0d6d0b1e6324" path="/var/lib/kubelet/pods/4594de24-91d2-408d-81a8-0d6d0b1e6324/volumes" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.138211 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5583554b-9fea-4104-a8a7-91e2151b3f45" path="/var/lib/kubelet/pods/5583554b-9fea-4104-a8a7-91e2151b3f45/volumes" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.138809 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.154815 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 05:47:06 crc kubenswrapper[4652]: W1205 05:47:06.157240 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5ed1fac_21dc_4868_aea0_114c5430b87a.slice/crio-ba5e01bd61d962da15c3063bb1769ef076040c2e786a775f644742bc6a383103 WatchSource:0}: Error finding container ba5e01bd61d962da15c3063bb1769ef076040c2e786a775f644742bc6a383103: Status 404 returned error can't find the container with id ba5e01bd61d962da15c3063bb1769ef076040c2e786a775f644742bc6a383103 Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.191777 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/648b01a7-3843-4d88-b8a3-fcd6d0b19231-logs\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.192005 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc 
kubenswrapper[4652]: I1205 05:47:06.192026 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pfhj\" (UniqueName: \"kubernetes.io/projected/648b01a7-3843-4d88-b8a3-fcd6d0b19231-kube-api-access-2pfhj\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.192130 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-config-data\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.192168 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-public-tls-certs\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.192204 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-internal-tls-certs\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.293145 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-internal-tls-certs\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.293247 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/648b01a7-3843-4d88-b8a3-fcd6d0b19231-logs\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.293288 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.293303 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pfhj\" (UniqueName: \"kubernetes.io/projected/648b01a7-3843-4d88-b8a3-fcd6d0b19231-kube-api-access-2pfhj\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.293365 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-config-data\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.293386 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-public-tls-certs\") pod \"nova-api-0\" (UID: 
\"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.294068 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/648b01a7-3843-4d88-b8a3-fcd6d0b19231-logs\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.295954 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-internal-tls-certs\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.296403 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-public-tls-certs\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.296815 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-config-data\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.297152 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/648b01a7-3843-4d88-b8a3-fcd6d0b19231-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.307218 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pfhj\" (UniqueName: \"kubernetes.io/projected/648b01a7-3843-4d88-b8a3-fcd6d0b19231-kube-api-access-2pfhj\") pod \"nova-api-0\" (UID: \"648b01a7-3843-4d88-b8a3-fcd6d0b19231\") " pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.436511 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 05:47:06 crc kubenswrapper[4652]: I1205 05:47:06.805740 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.079856 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"648b01a7-3843-4d88-b8a3-fcd6d0b19231","Type":"ContainerStarted","Data":"10bd02e7360cc8c06cd595208bb75bded947f85e1d34cc7fed175016e25553fc"} Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.080051 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"648b01a7-3843-4d88-b8a3-fcd6d0b19231","Type":"ContainerStarted","Data":"f6558d1970b00433cd0f82c9f56797a57d60ec6f368e87222e1aa6d17c8b0286"} Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.080064 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"648b01a7-3843-4d88-b8a3-fcd6d0b19231","Type":"ContainerStarted","Data":"ca60aa37a8198a3802b6473bdbaba5af4cf66977a9f4b29dbc3c64c5872b1ee8"} Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.081659 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ed1fac-21dc-4868-aea0-114c5430b87a","Type":"ContainerStarted","Data":"d80a1505069e90901889e8c5a5582a0d6876162b9a6c65d5f9f4b206495587ec"} Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.081693 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ed1fac-21dc-4868-aea0-114c5430b87a","Type":"ContainerStarted","Data":"70a5e68bb384ab4808f569eecf93962681f45215a791cb6aa07eebf1b67baa08"} Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.081704 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d5ed1fac-21dc-4868-aea0-114c5430b87a","Type":"ContainerStarted","Data":"ba5e01bd61d962da15c3063bb1769ef076040c2e786a775f644742bc6a383103"} Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.102348 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.102334593 podStartE2EDuration="1.102334593s" podCreationTimestamp="2025-12-05 05:47:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:47:07.094688604 +0000 UTC m=+1229.331418871" watchObservedRunningTime="2025-12-05 05:47:07.102334593 +0000 UTC m=+1229.339064860" Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.122135 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.122121358 podStartE2EDuration="2.122121358s" podCreationTimestamp="2025-12-05 05:47:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:47:07.113574425 +0000 UTC m=+1229.350304693" watchObservedRunningTime="2025-12-05 05:47:07.122121358 +0000 UTC m=+1229.358851626" Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.592701 4652 util.go:48] "No ready sandbox for pod can be found. 
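[Editor's note] The two pod_startup_latency_tracker entries above record the kubelet's startup-SLO measurements (podStartSLOduration) for nova-api-0 and nova-metadata-0. A minimal stdlib-Python sketch for pulling these figures out of a log like this one; the input file name kubelet.log is an assumption:

import re

# Collect "Observed pod startup duration" entries and print the slowest pods first.
pat = re.compile(r'"Observed pod startup duration" pod="([^"]+)" podStartSLOduration=([0-9.]+)')
durations = {}
with open("kubelet.log") as f:  # hypothetical input path
    for line in f:
        m = pat.search(line)
        if m:
            durations[m.group(1)] = float(m.group(2))  # last observation wins
for pod, secs in sorted(durations.items(), key=lambda kv: -kv[1]):
    print(f"{secs:8.3f}s  {pod}")

Run against the entries above, this would report roughly 2.1 s for nova-metadata-0 and 1.1 s for nova-api-0.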
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.612019 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-config-data\") pod \"51fc5aa7-5723-4983-8e88-6b96fa157fff\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") "
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.612085 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2d42\" (UniqueName: \"kubernetes.io/projected/51fc5aa7-5723-4983-8e88-6b96fa157fff-kube-api-access-h2d42\") pod \"51fc5aa7-5723-4983-8e88-6b96fa157fff\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") "
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.612121 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-combined-ca-bundle\") pod \"51fc5aa7-5723-4983-8e88-6b96fa157fff\" (UID: \"51fc5aa7-5723-4983-8e88-6b96fa157fff\") "
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.615819 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51fc5aa7-5723-4983-8e88-6b96fa157fff-kube-api-access-h2d42" (OuterVolumeSpecName: "kube-api-access-h2d42") pod "51fc5aa7-5723-4983-8e88-6b96fa157fff" (UID: "51fc5aa7-5723-4983-8e88-6b96fa157fff"). InnerVolumeSpecName "kube-api-access-h2d42". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.633454 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51fc5aa7-5723-4983-8e88-6b96fa157fff" (UID: "51fc5aa7-5723-4983-8e88-6b96fa157fff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.634075 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-config-data" (OuterVolumeSpecName: "config-data") pod "51fc5aa7-5723-4983-8e88-6b96fa157fff" (UID: "51fc5aa7-5723-4983-8e88-6b96fa157fff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.715254 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.715285 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2d42\" (UniqueName: \"kubernetes.io/projected/51fc5aa7-5723-4983-8e88-6b96fa157fff-kube-api-access-h2d42\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:07 crc kubenswrapper[4652]: I1205 05:47:07.715315 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51fc5aa7-5723-4983-8e88-6b96fa157fff-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.090034 4652 generic.go:334] "Generic (PLEG): container finished" podID="51fc5aa7-5723-4983-8e88-6b96fa157fff" containerID="14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025" exitCode=0
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.090080 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.090095 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"51fc5aa7-5723-4983-8e88-6b96fa157fff","Type":"ContainerDied","Data":"14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025"}
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.090624 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"51fc5aa7-5723-4983-8e88-6b96fa157fff","Type":"ContainerDied","Data":"7c6bf352e481f31ea1ad69d96a86abea6fc29272743f4011edf1f0ef80bf3562"}
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.090644 4652 scope.go:117] "RemoveContainer" containerID="14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025"
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.108817 4652 scope.go:117] "RemoveContainer" containerID="14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025"
Dec 05 05:47:08 crc kubenswrapper[4652]: E1205 05:47:08.109166 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025\": container with ID starting with 14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025 not found: ID does not exist" containerID="14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025"
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.109195 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025"} err="failed to get container status \"14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025\": rpc error: code = NotFound desc = could not find container \"14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025\": container with ID starting with 14f3b4399ae77899607a6534edebdeed531e3ec9c52eabb9e4fab2e1446c5025 not found: ID does not exist"
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.115889 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.124372 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
"SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.137229 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51fc5aa7-5723-4983-8e88-6b96fa157fff" path="/var/lib/kubelet/pods/51fc5aa7-5723-4983-8e88-6b96fa157fff/volumes" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.137778 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:47:08 crc kubenswrapper[4652]: E1205 05:47:08.138088 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51fc5aa7-5723-4983-8e88-6b96fa157fff" containerName="nova-scheduler-scheduler" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.138105 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="51fc5aa7-5723-4983-8e88-6b96fa157fff" containerName="nova-scheduler-scheduler" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.138304 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="51fc5aa7-5723-4983-8e88-6b96fa157fff" containerName="nova-scheduler-scheduler" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.138949 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.140497 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.146056 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.231505 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9602531b-42f8-4992-b338-f184b39ae600-config-data\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.231738 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9602531b-42f8-4992-b338-f184b39ae600-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.231883 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r2ml\" (UniqueName: \"kubernetes.io/projected/9602531b-42f8-4992-b338-f184b39ae600-kube-api-access-9r2ml\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.334869 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9602531b-42f8-4992-b338-f184b39ae600-config-data\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.335440 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9602531b-42f8-4992-b338-f184b39ae600-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.335504 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r2ml\" (UniqueName: \"kubernetes.io/projected/9602531b-42f8-4992-b338-f184b39ae600-kube-api-access-9r2ml\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.338992 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9602531b-42f8-4992-b338-f184b39ae600-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.339012 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9602531b-42f8-4992-b338-f184b39ae600-config-data\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.350017 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r2ml\" (UniqueName: \"kubernetes.io/projected/9602531b-42f8-4992-b338-f184b39ae600-kube-api-access-9r2ml\") pod \"nova-scheduler-0\" (UID: \"9602531b-42f8-4992-b338-f184b39ae600\") " pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.456637 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 05:47:08 crc kubenswrapper[4652]: I1205 05:47:08.825192 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 05:47:09 crc kubenswrapper[4652]: I1205 05:47:09.099704 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9602531b-42f8-4992-b338-f184b39ae600","Type":"ContainerStarted","Data":"d21394883d27c7f900e58aed4becad5d1394ad701b830b61098f446c5a13ee74"} Dec 05 05:47:09 crc kubenswrapper[4652]: I1205 05:47:09.099927 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9602531b-42f8-4992-b338-f184b39ae600","Type":"ContainerStarted","Data":"c45538ebc5d98053be6d8eff02b810659d834e38da6830e265194630f72f075f"} Dec 05 05:47:09 crc kubenswrapper[4652]: I1205 05:47:09.115718 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.115702774 podStartE2EDuration="1.115702774s" podCreationTimestamp="2025-12-05 05:47:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:47:09.110491811 +0000 UTC m=+1231.347222078" watchObservedRunningTime="2025-12-05 05:47:09.115702774 +0000 UTC m=+1231.352433041" Dec 05 05:47:10 crc kubenswrapper[4652]: I1205 05:47:10.760068 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 05:47:10 crc kubenswrapper[4652]: I1205 05:47:10.760273 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 05:47:13 crc kubenswrapper[4652]: I1205 05:47:13.457633 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 05:47:15 crc kubenswrapper[4652]: I1205 05:47:15.760420 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 
05:47:15 crc kubenswrapper[4652]: I1205 05:47:15.760662 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 05:47:16 crc kubenswrapper[4652]: I1205 05:47:16.436998 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 05:47:16 crc kubenswrapper[4652]: I1205 05:47:16.437040 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 05:47:16 crc kubenswrapper[4652]: I1205 05:47:16.772679 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d5ed1fac-21dc-4868-aea0-114c5430b87a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 05:47:16 crc kubenswrapper[4652]: I1205 05:47:16.772697 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d5ed1fac-21dc-4868-aea0-114c5430b87a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 05:47:17 crc kubenswrapper[4652]: I1205 05:47:17.450655 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="648b01a7-3843-4d88-b8a3-fcd6d0b19231" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.222:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 05:47:17 crc kubenswrapper[4652]: I1205 05:47:17.450690 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="648b01a7-3843-4d88-b8a3-fcd6d0b19231" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.222:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 05:47:18 crc kubenswrapper[4652]: I1205 05:47:18.457700 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 05:47:18 crc kubenswrapper[4652]: I1205 05:47:18.480235 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 05:47:19 crc kubenswrapper[4652]: I1205 05:47:19.210184 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 05:47:19 crc kubenswrapper[4652]: I1205 05:47:19.320947 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 05:47:25 crc kubenswrapper[4652]: I1205 05:47:25.764453 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 05:47:25 crc kubenswrapper[4652]: I1205 05:47:25.765085 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 05:47:25 crc kubenswrapper[4652]: I1205 05:47:25.768568 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 05:47:26 crc kubenswrapper[4652]: I1205 05:47:26.242822 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 05:47:26 crc kubenswrapper[4652]: I1205 05:47:26.445777 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 05:47:26 crc kubenswrapper[4652]: I1205 05:47:26.446224 
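[Editor's note] The startup-probe failures above (Client.Timeout against https://10.217.0.221:8775/ and https://10.217.0.222:8774/) fall inside a normal warm-up window: the same pods flip to status="started" and then "ready" within about ten seconds. A sketch that tallies "Probe failed" entries per pod/container/probe type, to separate this transient noise from persistent failures; the input path is an assumption:

import collections
import re

pat = re.compile(r'"Probe failed" probeType="(\w+)" pod="([^"]+)".*containerName="([^"]+)"')
counts = collections.Counter()
with open("kubelet.log") as f:  # hypothetical input path
    for line in f:
        m = pat.search(line)
        if m:
            counts[m.groups()] += 1
for (ptype, pod, container), n in counts.most_common():
    print(f"{n:4d}  {ptype:9s} {pod} / {container}")

A container that shows a handful of Startup failures followed by silence matches the benign pattern seen here; a steadily growing Liveness or Readiness count would warrant a closer look.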
Dec 05 05:47:26 crc kubenswrapper[4652]: I1205 05:47:26.446295 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 05:47:26 crc kubenswrapper[4652]: I1205 05:47:26.453231 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 05:47:27 crc kubenswrapper[4652]: I1205 05:47:27.247803 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 05:47:27 crc kubenswrapper[4652]: I1205 05:47:27.255600 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 05:47:33 crc kubenswrapper[4652]: I1205 05:47:33.595225 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 05:47:34 crc kubenswrapper[4652]: I1205 05:47:34.150247 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 05:47:34 crc kubenswrapper[4652]: I1205 05:47:34.150305 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 05:47:34 crc kubenswrapper[4652]: I1205 05:47:34.262055 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 05:47:36 crc kubenswrapper[4652]: I1205 05:47:36.062108 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerName="rabbitmq" containerID="cri-o://0e3a5fca10802e00f74a6b2b4ac368814855aef5424c778c1299748257a5e6ec" gracePeriod=604798
Dec 05 05:47:36 crc kubenswrapper[4652]: I1205 05:47:36.556288 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused"
Dec 05 05:47:36 crc kubenswrapper[4652]: I1205 05:47:36.779112 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerName="rabbitmq" containerID="cri-o://40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad" gracePeriod=604798
Dec 05 05:47:36 crc kubenswrapper[4652]: I1205 05:47:36.810636 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused"
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.344101 4652 generic.go:334] "Generic (PLEG): container finished" podID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerID="0e3a5fca10802e00f74a6b2b4ac368814855aef5424c778c1299748257a5e6ec" exitCode=0
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.344159 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4aa077b9-9612-44cf-b163-d0c1f9468787","Type":"ContainerDied","Data":"0e3a5fca10802e00f74a6b2b4ac368814855aef5424c778c1299748257a5e6ec"}
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.452076 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561174 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-server-conf\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561232 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561259 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4aa077b9-9612-44cf-b163-d0c1f9468787-pod-info\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561318 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4aa077b9-9612-44cf-b163-d0c1f9468787-erlang-cookie-secret\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561374 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-erlang-cookie\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561451 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqkz8\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-kube-api-access-wqkz8\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561487 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-plugins\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561734 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-plugins-conf\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561790 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-config-data\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561815 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-confd\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.561882 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-tls\") pod \"4aa077b9-9612-44cf-b163-d0c1f9468787\" (UID: \"4aa077b9-9612-44cf-b163-d0c1f9468787\") "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.562942 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.563139 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.564706 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.566699 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.566948 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4aa077b9-9612-44cf-b163-d0c1f9468787-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.567270 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.567329 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4aa077b9-9612-44cf-b163-d0c1f9468787-pod-info" (OuterVolumeSpecName: "pod-info") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.568372 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-kube-api-access-wqkz8" (OuterVolumeSpecName: "kube-api-access-wqkz8") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "kube-api-access-wqkz8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.586304 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-config-data" (OuterVolumeSpecName: "config-data") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.600058 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-server-conf" (OuterVolumeSpecName: "server-conf") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.642422 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4aa077b9-9612-44cf-b163-d0c1f9468787" (UID: "4aa077b9-9612-44cf-b163-d0c1f9468787"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664479 4652 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4aa077b9-9612-44cf-b163-d0c1f9468787-pod-info\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664511 4652 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4aa077b9-9612-44cf-b163-d0c1f9468787-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664524 4652 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664532 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqkz8\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-kube-api-access-wqkz8\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664541 4652 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664549 4652 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664586 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664594 4652 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664604 4652 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4aa077b9-9612-44cf-b163-d0c1f9468787-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664611 4652 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4aa077b9-9612-44cf-b163-d0c1f9468787-server-conf\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.664632 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.684602 4652 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Dec 05 05:47:37 crc kubenswrapper[4652]: I1205 05:47:37.765920 4652 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.106513 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.171662 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-confd\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.171714 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.171745 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-pod-info\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.171872 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-config-data\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.171890 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-plugins-conf\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.171925 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-erlang-cookie\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.172031 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-plugins\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.172056 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-tls\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.172120 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-erlang-cookie-secret\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.172146 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-server-conf\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.172169 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsbjj\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-kube-api-access-fsbjj\") pod \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\" (UID: \"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f\") "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.173665 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.176678 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.176912 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.179325 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.185240 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-kube-api-access-fsbjj" (OuterVolumeSpecName: "kube-api-access-fsbjj") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "kube-api-access-fsbjj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.186252 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-pod-info" (OuterVolumeSpecName: "pod-info") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.186818 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.187547 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.198435 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-config-data" (OuterVolumeSpecName: "config-data") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.216972 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-server-conf" (OuterVolumeSpecName: "server-conf") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.259910 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" (UID: "8f7bc86b-3d63-41b5-b116-5b638d1c9f7f"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274505 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274531 4652 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274542 4652 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274551 4652 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274572 4652 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274582 4652 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274590 4652 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-server-conf\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274597 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsbjj\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-kube-api-access-fsbjj\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274605 4652 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274624 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.274631 4652 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f-pod-info\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.291308 4652 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.360099 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4aa077b9-9612-44cf-b163-d0c1f9468787","Type":"ContainerDied","Data":"4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f"}
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.360153 4652 scope.go:117] "RemoveContainer" containerID="0e3a5fca10802e00f74a6b2b4ac368814855aef5424c778c1299748257a5e6ec"
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.360277 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.375894 4652 generic.go:334] "Generic (PLEG): container finished" podID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerID="40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad" exitCode=0
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.375939 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f","Type":"ContainerDied","Data":"40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad"}
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.375965 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8f7bc86b-3d63-41b5-b116-5b638d1c9f7f","Type":"ContainerDied","Data":"3a3d26c0302ddf0c697b0e8e287faa59e64929d49ebdce99e712c2ea3b10921f"}
Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.376057 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.378330 4652 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.397919 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.412472 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.414508 4652 scope.go:117] "RemoveContainer" containerID="10385fadc1c0181fdfc6ad208b74d8a4b1a93afe7b38437feb6e08a3cf4c076e" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.418543 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.438040 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.451598 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 05:47:38 crc kubenswrapper[4652]: E1205 05:47:38.451994 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerName="setup-container" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.452011 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerName="setup-container" Dec 05 05:47:38 crc kubenswrapper[4652]: E1205 05:47:38.452046 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerName="rabbitmq" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.452052 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerName="rabbitmq" Dec 05 05:47:38 crc kubenswrapper[4652]: E1205 05:47:38.452069 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerName="setup-container" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.452074 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerName="setup-container" Dec 05 05:47:38 crc kubenswrapper[4652]: E1205 05:47:38.452085 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerName="rabbitmq" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.452091 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerName="rabbitmq" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.452291 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" containerName="rabbitmq" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.452313 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787" containerName="rabbitmq" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.453211 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.454400 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.454777 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.470239 4652 scope.go:117] "RemoveContainer" containerID="40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474049 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-twzml" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474149 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474271 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474379 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474485 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474618 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474695 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474760 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474822 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474879 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.474939 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.475000 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.475068 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.475070 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-kjc7q" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.482386 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.498672 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.501701 4652 scope.go:117] "RemoveContainer" containerID="8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.522344 4652 scope.go:117] "RemoveContainer" containerID="40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad" Dec 05 05:47:38 crc kubenswrapper[4652]: E1205 05:47:38.523012 4652 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad\": container with ID starting with 40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad not found: ID does not exist" containerID="40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.523074 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad"} err="failed to get container status \"40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad\": rpc error: code = NotFound desc = could not find container \"40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad\": container with ID starting with 40c4889d0900cf00e3b31f0cda5ac2e5695b7cc7499902019a1c5e5942ebd7ad not found: ID does not exist" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.523099 4652 scope.go:117] "RemoveContainer" containerID="8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594" Dec 05 05:47:38 crc kubenswrapper[4652]: E1205 05:47:38.524149 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594\": container with ID starting with 8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594 not found: ID does not exist" containerID="8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.524188 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594"} err="failed to get container status \"8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594\": rpc error: code = NotFound desc = could not find container \"8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594\": container with ID starting with 8a7243db27e5de241fe20209bf5d086e9bf9e87ecb5925ba2a85b2f8d5a88594 not found: ID does not exist" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.582486 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.582521 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.582549 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.582583 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.582715 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-config-data\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.582761 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.582795 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.582914 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583017 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583073 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583103 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583126 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f3a8fdb1-ae46-469f-9d70-a1947c935abd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583146 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583292 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583342 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583432 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583494 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583588 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzmbz\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-kube-api-access-lzmbz\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583662 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f3a8fdb1-ae46-469f-9d70-a1947c935abd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583684 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqsk5\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-kube-api-access-lqsk5\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583700 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.583780 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.684644 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.684851 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.684923 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685019 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzmbz\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-kube-api-access-lzmbz\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685099 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f3a8fdb1-ae46-469f-9d70-a1947c935abd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685158 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqsk5\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-kube-api-access-lqsk5\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685219 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685289 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685361 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " 
pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685431 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685503 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685589 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685672 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-config-data\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685743 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685807 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685865 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685952 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686015 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686021 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686077 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686090 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.685772 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686130 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f3a8fdb1-ae46-469f-9d70-a1947c935abd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686155 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686243 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686249 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686440 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686503 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc 
kubenswrapper[4652]: I1205 05:47:38.686669 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-config-data\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686698 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.686738 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.687076 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f3a8fdb1-ae46-469f-9d70-a1947c935abd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.687240 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.688325 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.688775 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f3a8fdb1-ae46-469f-9d70-a1947c935abd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.689585 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.689922 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.690107 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f3a8fdb1-ae46-469f-9d70-a1947c935abd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.690540 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.690574 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.691357 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.695141 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.701266 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzmbz\" (UniqueName: \"kubernetes.io/projected/f3a8fdb1-ae46-469f-9d70-a1947c935abd-kube-api-access-lzmbz\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.702472 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqsk5\" (UniqueName: \"kubernetes.io/projected/ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8-kube-api-access-lqsk5\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.724985 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8\") " pod="openstack/rabbitmq-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.727631 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f3a8fdb1-ae46-469f-9d70-a1947c935abd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.803850 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:47:38 crc kubenswrapper[4652]: I1205 05:47:38.808062 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 05:47:39 crc kubenswrapper[4652]: W1205 05:47:39.193992 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3a8fdb1_ae46_469f_9d70_a1947c935abd.slice/crio-a53e8b3c2127fa899bb262a00a608cd733a58d9c61a9e91e94f90044e3e99bfe WatchSource:0}: Error finding container a53e8b3c2127fa899bb262a00a608cd733a58d9c61a9e91e94f90044e3e99bfe: Status 404 returned error can't find the container with id a53e8b3c2127fa899bb262a00a608cd733a58d9c61a9e91e94f90044e3e99bfe Dec 05 05:47:39 crc kubenswrapper[4652]: W1205 05:47:39.196273 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad23a29e_807c_4ec3_a29c_2ef1cfa1acd8.slice/crio-700303a53f1b1b105da34279631919e3888a0106d8330856b0068b994af77d79 WatchSource:0}: Error finding container 700303a53f1b1b105da34279631919e3888a0106d8330856b0068b994af77d79: Status 404 returned error can't find the container with id 700303a53f1b1b105da34279631919e3888a0106d8330856b0068b994af77d79 Dec 05 05:47:39 crc kubenswrapper[4652]: I1205 05:47:39.199159 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 05:47:39 crc kubenswrapper[4652]: I1205 05:47:39.207984 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 05:47:39 crc kubenswrapper[4652]: I1205 05:47:39.383760 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8","Type":"ContainerStarted","Data":"700303a53f1b1b105da34279631919e3888a0106d8330856b0068b994af77d79"} Dec 05 05:47:39 crc kubenswrapper[4652]: I1205 05:47:39.384852 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f3a8fdb1-ae46-469f-9d70-a1947c935abd","Type":"ContainerStarted","Data":"a53e8b3c2127fa899bb262a00a608cd733a58d9c61a9e91e94f90044e3e99bfe"} Dec 05 05:47:40 crc kubenswrapper[4652]: I1205 05:47:40.134656 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4aa077b9-9612-44cf-b163-d0c1f9468787" path="/var/lib/kubelet/pods/4aa077b9-9612-44cf-b163-d0c1f9468787/volumes" Dec 05 05:47:40 crc kubenswrapper[4652]: I1205 05:47:40.136142 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f7bc86b-3d63-41b5-b116-5b638d1c9f7f" path="/var/lib/kubelet/pods/8f7bc86b-3d63-41b5-b116-5b638d1c9f7f/volumes" Dec 05 05:47:40 crc kubenswrapper[4652]: I1205 05:47:40.395750 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8","Type":"ContainerStarted","Data":"807f796575a81a6f3d55cbb033b5ed34dfbb602f172410223d4282ead9d50c40"} Dec 05 05:47:40 crc kubenswrapper[4652]: I1205 05:47:40.397891 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f3a8fdb1-ae46-469f-9d70-a1947c935abd","Type":"ContainerStarted","Data":"c3ce8fd92e6c9b9be54d4c6843832ad528e763b21ab80b46df0a561bef925f17"} Dec 05 05:47:44 crc kubenswrapper[4652]: E1205 05:47:44.731545 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice/crio-4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f\": RecentStats: unable to find data in memory cache]" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.305657 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-x9jx5"] Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.307576 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.313036 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.333251 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-x9jx5"] Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.404437 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-nb\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.404529 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-config\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.404615 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-openstack-edpm-ipam\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.404671 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-swift-storage-0\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.404874 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2mmw\" (UniqueName: \"kubernetes.io/projected/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-kube-api-access-m2mmw\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.405012 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-svc\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.405227 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-sb\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.507518 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-swift-storage-0\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.507603 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2mmw\" (UniqueName: \"kubernetes.io/projected/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-kube-api-access-m2mmw\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.507636 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-svc\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.507695 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-sb\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.507725 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-nb\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.507763 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-config\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.507789 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-openstack-edpm-ipam\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.508722 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-openstack-edpm-ipam\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.508826 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-nb\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.508934 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-svc\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.508939 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-config\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.508987 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-sb\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.509097 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-swift-storage-0\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.529382 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2mmw\" (UniqueName: \"kubernetes.io/projected/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-kube-api-access-m2mmw\") pod \"dnsmasq-dns-bf6c7df67-x9jx5\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:45 crc kubenswrapper[4652]: I1205 05:47:45.623424 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:46 crc kubenswrapper[4652]: I1205 05:47:46.022385 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-x9jx5"] Dec 05 05:47:46 crc kubenswrapper[4652]: I1205 05:47:46.447978 4652 generic.go:334] "Generic (PLEG): container finished" podID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" containerID="ce8ea4d4acbc3aef94bfa63bea8ff2aaecda8629c99f4e3129e3792e63a1cd27" exitCode=0 Dec 05 05:47:46 crc kubenswrapper[4652]: I1205 05:47:46.448075 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" event={"ID":"fa3e3eff-c7ff-488f-aaa1-727896fda6f8","Type":"ContainerDied","Data":"ce8ea4d4acbc3aef94bfa63bea8ff2aaecda8629c99f4e3129e3792e63a1cd27"} Dec 05 05:47:46 crc kubenswrapper[4652]: I1205 05:47:46.448242 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" event={"ID":"fa3e3eff-c7ff-488f-aaa1-727896fda6f8","Type":"ContainerStarted","Data":"97db6bfe6bb61658d9c155cd4a518841b9d98f807b9b98063ca0edf4888845a8"} Dec 05 05:47:47 crc kubenswrapper[4652]: I1205 05:47:47.458435 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" event={"ID":"fa3e3eff-c7ff-488f-aaa1-727896fda6f8","Type":"ContainerStarted","Data":"ab18b61dcd503e6f02af257d289af2329089c210c8ad7190443d55301d9d2e28"} Dec 05 05:47:47 crc kubenswrapper[4652]: I1205 05:47:47.459590 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:47 crc kubenswrapper[4652]: I1205 05:47:47.473916 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" podStartSLOduration=2.47389851 podStartE2EDuration="2.47389851s" podCreationTimestamp="2025-12-05 05:47:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:47:47.473327868 +0000 UTC m=+1269.710058134" watchObservedRunningTime="2025-12-05 05:47:47.47389851 +0000 UTC m=+1269.710628777" Dec 05 05:47:54 crc kubenswrapper[4652]: E1205 05:47:54.967151 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice/crio-4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f\": RecentStats: unable to find data in memory cache]" Dec 05 05:47:55 crc kubenswrapper[4652]: I1205 05:47:55.624773 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:47:55 crc kubenswrapper[4652]: I1205 05:47:55.671217 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-bmqg6"] Dec 05 05:47:55 crc kubenswrapper[4652]: I1205 05:47:55.671454 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" podUID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" containerName="dnsmasq-dns" containerID="cri-o://3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9" gracePeriod=10 Dec 05 05:47:55 crc kubenswrapper[4652]: I1205 05:47:55.907356 4652 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-77b58f4b85-rwqgd"] Dec 05 05:47:55 crc kubenswrapper[4652]: I1205 05:47:55.909749 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:55 crc kubenswrapper[4652]: I1205 05:47:55.918938 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77b58f4b85-rwqgd"] Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.014914 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-dns-svc\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.014984 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-ovsdbserver-nb\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.015088 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-config\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.015107 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-ovsdbserver-sb\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.015142 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-dns-swift-storage-0\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.015177 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2482\" (UniqueName: \"kubernetes.io/projected/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-kube-api-access-m2482\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.015324 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-openstack-edpm-ipam\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.108472 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.117848 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-config\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.117882 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-ovsdbserver-sb\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.117909 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-dns-swift-storage-0\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.117935 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2482\" (UniqueName: \"kubernetes.io/projected/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-kube-api-access-m2482\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.118032 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-openstack-edpm-ipam\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.118079 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-dns-svc\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.118104 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-ovsdbserver-nb\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.119034 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-ovsdbserver-nb\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.119075 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-config\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" Dec 05 
05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.119214 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-ovsdbserver-sb\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.119550 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-openstack-edpm-ipam\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.119777 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-dns-svc\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.119991 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-dns-swift-storage-0\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.148236 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2482\" (UniqueName: \"kubernetes.io/projected/4c98e2af-23aa-4018-bf3f-cd92dbb41ebb-kube-api-access-m2482\") pod \"dnsmasq-dns-77b58f4b85-rwqgd\" (UID: \"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb\") " pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.219856 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-svc\") pod \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") "
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.220349 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-swift-storage-0\") pod \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") "
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.220390 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-nb\") pod \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") "
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.220505 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-sb\") pod \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") "
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.220651 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2flc4\" (UniqueName: \"kubernetes.io/projected/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-kube-api-access-2flc4\") pod \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") "
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.220696 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-config\") pod \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\" (UID: \"c2df2c65-9ffc-41ff-abcf-14b13aee5e97\") "
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.224633 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-kube-api-access-2flc4" (OuterVolumeSpecName: "kube-api-access-2flc4") pod "c2df2c65-9ffc-41ff-abcf-14b13aee5e97" (UID: "c2df2c65-9ffc-41ff-abcf-14b13aee5e97"). InnerVolumeSpecName "kube-api-access-2flc4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.232695 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.265693 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c2df2c65-9ffc-41ff-abcf-14b13aee5e97" (UID: "c2df2c65-9ffc-41ff-abcf-14b13aee5e97"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.272130 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-config" (OuterVolumeSpecName: "config") pod "c2df2c65-9ffc-41ff-abcf-14b13aee5e97" (UID: "c2df2c65-9ffc-41ff-abcf-14b13aee5e97"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.272161 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c2df2c65-9ffc-41ff-abcf-14b13aee5e97" (UID: "c2df2c65-9ffc-41ff-abcf-14b13aee5e97"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.272696 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c2df2c65-9ffc-41ff-abcf-14b13aee5e97" (UID: "c2df2c65-9ffc-41ff-abcf-14b13aee5e97"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.274237 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c2df2c65-9ffc-41ff-abcf-14b13aee5e97" (UID: "c2df2c65-9ffc-41ff-abcf-14b13aee5e97"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.324053 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.324081 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2flc4\" (UniqueName: \"kubernetes.io/projected/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-kube-api-access-2flc4\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.324094 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.324121 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.324129 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.324137 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2df2c65-9ffc-41ff-abcf-14b13aee5e97-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.537397 4652 generic.go:334] "Generic (PLEG): container finished" podID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" containerID="3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9" exitCode=0
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.537470 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" event={"ID":"c2df2c65-9ffc-41ff-abcf-14b13aee5e97","Type":"ContainerDied","Data":"3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9"}
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.537504 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6" event={"ID":"c2df2c65-9ffc-41ff-abcf-14b13aee5e97","Type":"ContainerDied","Data":"238025f236b9bb0421cbaac6910a1a042faac4f05204000bc3b57847d69b4b72"}
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.537523 4652 scope.go:117] "RemoveContainer" containerID="3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.537718 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54599d8f7-bmqg6"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.567578 4652 scope.go:117] "RemoveContainer" containerID="65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.571236 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-bmqg6"]
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.579188 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54599d8f7-bmqg6"]
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.607595 4652 scope.go:117] "RemoveContainer" containerID="3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9"
Dec 05 05:47:56 crc kubenswrapper[4652]: E1205 05:47:56.608243 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9\": container with ID starting with 3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9 not found: ID does not exist" containerID="3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.608287 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9"} err="failed to get container status \"3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9\": rpc error: code = NotFound desc = could not find container \"3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9\": container with ID starting with 3f3b3293170a6e106f32a8c2e518e495dd93a6f0755442669b78f44d8d6118e9 not found: ID does not exist"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.608321 4652 scope.go:117] "RemoveContainer" containerID="65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9"
Dec 05 05:47:56 crc kubenswrapper[4652]: E1205 05:47:56.608806 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9\": container with ID starting with 65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9 not found: ID does not exist" containerID="65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9"
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.608837 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9"} err="failed to get container status \"65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9\": rpc error: code = NotFound desc = could not find container \"65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9\": container with ID starting with 65298dbdbedf61caccb799b25baacb8e90eb2aa29e037eec1cd532c46e186ec9 not found: ID does not exist"
Dec 05 05:47:56 crc kubenswrapper[4652]: W1205 05:47:56.630660 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c98e2af_23aa_4018_bf3f_cd92dbb41ebb.slice/crio-4fd7ab41034ef722d52f1d3723fff81c34eb06c33f19c4e364c615720bebb42a WatchSource:0}: Error finding container 4fd7ab41034ef722d52f1d3723fff81c34eb06c33f19c4e364c615720bebb42a: Status 404 returned error can't find the container with id 4fd7ab41034ef722d52f1d3723fff81c34eb06c33f19c4e364c615720bebb42a
Dec 05 05:47:56 crc kubenswrapper[4652]: I1205 05:47:56.633435 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77b58f4b85-rwqgd"]
Dec 05 05:47:57 crc kubenswrapper[4652]: I1205 05:47:57.548610 4652 generic.go:334] "Generic (PLEG): container finished" podID="4c98e2af-23aa-4018-bf3f-cd92dbb41ebb" containerID="87a19eae7f1d0710a36f721776ecb957e423dc7c9f2549e99cfa529497645cbe" exitCode=0
Dec 05 05:47:57 crc kubenswrapper[4652]: I1205 05:47:57.548710 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" event={"ID":"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb","Type":"ContainerDied","Data":"87a19eae7f1d0710a36f721776ecb957e423dc7c9f2549e99cfa529497645cbe"}
Dec 05 05:47:57 crc kubenswrapper[4652]: I1205 05:47:57.548965 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" event={"ID":"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb","Type":"ContainerStarted","Data":"4fd7ab41034ef722d52f1d3723fff81c34eb06c33f19c4e364c615720bebb42a"}
Dec 05 05:47:58 crc kubenswrapper[4652]: I1205 05:47:58.134936 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" path="/var/lib/kubelet/pods/c2df2c65-9ffc-41ff-abcf-14b13aee5e97/volumes"
Dec 05 05:47:58 crc kubenswrapper[4652]: I1205 05:47:58.559303 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" event={"ID":"4c98e2af-23aa-4018-bf3f-cd92dbb41ebb","Type":"ContainerStarted","Data":"5c712342eaaaa35e0b4d80b2074f26772c17e9d227854719292fbf5bb0a3d51f"}
Dec 05 05:47:58 crc kubenswrapper[4652]: I1205 05:47:58.559677 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd"
Dec 05 05:47:58 crc kubenswrapper[4652]: I1205 05:47:58.580808 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd" podStartSLOduration=3.580785954 podStartE2EDuration="3.580785954s" podCreationTimestamp="2025-12-05 05:47:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:47:58.572845173 +0000 UTC m=+1280.809575440" watchObservedRunningTime="2025-12-05 05:47:58.580785954 +0000 UTC m=+1280.817516221"
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.150860 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.151344 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.151383 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24"
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.152246 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3f2f5ede1a2a06f286baf6cd6bd2d3f9a5125ae0f0e1be6280b0ee6eb87a5ea7"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.152303 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://3f2f5ede1a2a06f286baf6cd6bd2d3f9a5125ae0f0e1be6280b0ee6eb87a5ea7" gracePeriod=600
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.606545 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="3f2f5ede1a2a06f286baf6cd6bd2d3f9a5125ae0f0e1be6280b0ee6eb87a5ea7" exitCode=0
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.606619 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"3f2f5ede1a2a06f286baf6cd6bd2d3f9a5125ae0f0e1be6280b0ee6eb87a5ea7"}
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.606861 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876"}
Dec 05 05:48:04 crc kubenswrapper[4652]: I1205 05:48:04.606880 4652 scope.go:117] "RemoveContainer" containerID="0e8d98548aa27c73c99040f551d5e2c229be9c9f00418747bea9244c8abd5fdd"
Dec 05 05:48:05 crc kubenswrapper[4652]: E1205 05:48:05.191093 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice/crio-4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.234578 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77b58f4b85-rwqgd"
Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.296170 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-x9jx5"]
Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.296662 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" podUID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" containerName="dnsmasq-dns" containerID="cri-o://ab18b61dcd503e6f02af257d289af2329089c210c8ad7190443d55301d9d2e28" gracePeriod=10
Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.629342 4652 generic.go:334] "Generic (PLEG): container finished" podID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" containerID="ab18b61dcd503e6f02af257d289af2329089c210c8ad7190443d55301d9d2e28" exitCode=0
Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.629507 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" event={"ID":"fa3e3eff-c7ff-488f-aaa1-727896fda6f8","Type":"ContainerDied","Data":"ab18b61dcd503e6f02af257d289af2329089c210c8ad7190443d55301d9d2e28"}
event={"ID":"fa3e3eff-c7ff-488f-aaa1-727896fda6f8","Type":"ContainerDied","Data":"ab18b61dcd503e6f02af257d289af2329089c210c8ad7190443d55301d9d2e28"} Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.725437 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.744671 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-config\") pod \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.744723 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-swift-storage-0\") pod \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.744837 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-nb\") pod \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.744877 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-svc\") pod \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.744900 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-openstack-edpm-ipam\") pod \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.745003 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-sb\") pod \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.745043 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2mmw\" (UniqueName: \"kubernetes.io/projected/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-kube-api-access-m2mmw\") pod \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\" (UID: \"fa3e3eff-c7ff-488f-aaa1-727896fda6f8\") " Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.762831 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-kube-api-access-m2mmw" (OuterVolumeSpecName: "kube-api-access-m2mmw") pod "fa3e3eff-c7ff-488f-aaa1-727896fda6f8" (UID: "fa3e3eff-c7ff-488f-aaa1-727896fda6f8"). InnerVolumeSpecName "kube-api-access-m2mmw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.787862 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "fa3e3eff-c7ff-488f-aaa1-727896fda6f8" (UID: "fa3e3eff-c7ff-488f-aaa1-727896fda6f8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.791223 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fa3e3eff-c7ff-488f-aaa1-727896fda6f8" (UID: "fa3e3eff-c7ff-488f-aaa1-727896fda6f8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.794302 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "fa3e3eff-c7ff-488f-aaa1-727896fda6f8" (UID: "fa3e3eff-c7ff-488f-aaa1-727896fda6f8"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.796786 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fa3e3eff-c7ff-488f-aaa1-727896fda6f8" (UID: "fa3e3eff-c7ff-488f-aaa1-727896fda6f8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.797829 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fa3e3eff-c7ff-488f-aaa1-727896fda6f8" (UID: "fa3e3eff-c7ff-488f-aaa1-727896fda6f8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.809803 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-config" (OuterVolumeSpecName: "config") pod "fa3e3eff-c7ff-488f-aaa1-727896fda6f8" (UID: "fa3e3eff-c7ff-488f-aaa1-727896fda6f8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.848369 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.848409 4652 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.848429 4652 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.848440 4652 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.848449 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2mmw\" (UniqueName: \"kubernetes.io/projected/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-kube-api-access-m2mmw\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.848460 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:06 crc kubenswrapper[4652]: I1205 05:48:06.848470 4652 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/fa3e3eff-c7ff-488f-aaa1-727896fda6f8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:07 crc kubenswrapper[4652]: I1205 05:48:07.642434 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bf6c7df67-x9jx5" event={"ID":"fa3e3eff-c7ff-488f-aaa1-727896fda6f8","Type":"ContainerDied","Data":"97db6bfe6bb61658d9c155cd4a518841b9d98f807b9b98063ca0edf4888845a8"} Dec 05 05:48:07 crc kubenswrapper[4652]: I1205 05:48:07.642950 4652 scope.go:117] "RemoveContainer" containerID="ab18b61dcd503e6f02af257d289af2329089c210c8ad7190443d55301d9d2e28" Dec 05 05:48:07 crc kubenswrapper[4652]: I1205 05:48:07.642510 4652 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 05:48:07 crc kubenswrapper[4652]: I1205 05:48:07.661776 4652 scope.go:117] "RemoveContainer" containerID="ce8ea4d4acbc3aef94bfa63bea8ff2aaecda8629c99f4e3129e3792e63a1cd27"
Dec 05 05:48:07 crc kubenswrapper[4652]: I1205 05:48:07.681228 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-x9jx5"]
Dec 05 05:48:07 crc kubenswrapper[4652]: I1205 05:48:07.691206 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bf6c7df67-x9jx5"]
Dec 05 05:48:08 crc kubenswrapper[4652]: I1205 05:48:08.136277 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" path="/var/lib/kubelet/pods/fa3e3eff-c7ff-488f-aaa1-727896fda6f8/volumes"
Dec 05 05:48:12 crc kubenswrapper[4652]: I1205 05:48:12.698488 4652 generic.go:334] "Generic (PLEG): container finished" podID="ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8" containerID="807f796575a81a6f3d55cbb033b5ed34dfbb602f172410223d4282ead9d50c40" exitCode=0
Dec 05 05:48:12 crc kubenswrapper[4652]: I1205 05:48:12.698600 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8","Type":"ContainerDied","Data":"807f796575a81a6f3d55cbb033b5ed34dfbb602f172410223d4282ead9d50c40"}
Dec 05 05:48:12 crc kubenswrapper[4652]: I1205 05:48:12.701814 4652 generic.go:334] "Generic (PLEG): container finished" podID="f3a8fdb1-ae46-469f-9d70-a1947c935abd" containerID="c3ce8fd92e6c9b9be54d4c6843832ad528e763b21ab80b46df0a561bef925f17" exitCode=0
Dec 05 05:48:12 crc kubenswrapper[4652]: I1205 05:48:12.701869 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f3a8fdb1-ae46-469f-9d70-a1947c935abd","Type":"ContainerDied","Data":"c3ce8fd92e6c9b9be54d4c6843832ad528e763b21ab80b46df0a561bef925f17"}
Dec 05 05:48:13 crc kubenswrapper[4652]: I1205 05:48:13.722941 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f3a8fdb1-ae46-469f-9d70-a1947c935abd","Type":"ContainerStarted","Data":"19431d61dc3ea4439cb54f994c8ac6ca10d9da14adc3f120afefebf097f51332"}
Dec 05 05:48:13 crc kubenswrapper[4652]: I1205 05:48:13.724545 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 05:48:13 crc kubenswrapper[4652]: I1205 05:48:13.726712 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8","Type":"ContainerStarted","Data":"9ec3328b7700d2d308dcdc3429f67618e6e1b8cbcc2ef3b02b3b8b1015830fd5"}
Dec 05 05:48:13 crc kubenswrapper[4652]: I1205 05:48:13.726913 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 05 05:48:13 crc kubenswrapper[4652]: I1205 05:48:13.743399 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=35.743368905 podStartE2EDuration="35.743368905s" podCreationTimestamp="2025-12-05 05:47:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:48:13.741566647 +0000 UTC m=+1295.978296934" watchObservedRunningTime="2025-12-05 05:48:13.743368905 +0000 UTC m=+1295.980099172"
Dec 05 05:48:15 crc kubenswrapper[4652]: E1205 05:48:15.402714 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice/crio-4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f\": RecentStats: unable to find data in memory cache]"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.548974 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=46.548957076 podStartE2EDuration="46.548957076s" podCreationTimestamp="2025-12-05 05:47:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:48:13.770214985 +0000 UTC m=+1296.006945252" watchObservedRunningTime="2025-12-05 05:48:24.548957076 +0000 UTC m=+1306.785687343"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.555503 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"]
Dec 05 05:48:24 crc kubenswrapper[4652]: E1205 05:48:24.555918 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" containerName="dnsmasq-dns"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.555935 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" containerName="dnsmasq-dns"
Dec 05 05:48:24 crc kubenswrapper[4652]: E1205 05:48:24.555958 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" containerName="init"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.555965 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" containerName="init"
Dec 05 05:48:24 crc kubenswrapper[4652]: E1205 05:48:24.555995 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" containerName="init"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.556001 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" containerName="init"
Dec 05 05:48:24 crc kubenswrapper[4652]: E1205 05:48:24.556010 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" containerName="dnsmasq-dns"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.556015 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" containerName="dnsmasq-dns"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.556214 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa3e3eff-c7ff-488f-aaa1-727896fda6f8" containerName="dnsmasq-dns"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.556236 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2df2c65-9ffc-41ff-abcf-14b13aee5e97" containerName="dnsmasq-dns"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.556928 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.558472 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.558829 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.558983 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.561624 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.563975 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"]
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.695748 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.695816 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dkkp\" (UniqueName: \"kubernetes.io/projected/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-kube-api-access-7dkkp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.695848 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.695984 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.798466 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dkkp\" (UniqueName: \"kubernetes.io/projected/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-kube-api-access-7dkkp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"
Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.798513 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"
\"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.798595 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.798704 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.803926 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.804050 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.804456 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.812298 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dkkp\" (UniqueName: \"kubernetes.io/projected/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-kube-api-access-7dkkp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:24 crc kubenswrapper[4652]: I1205 05:48:24.880207 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:25 crc kubenswrapper[4652]: I1205 05:48:25.351822 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv"] Dec 05 05:48:25 crc kubenswrapper[4652]: E1205 05:48:25.623126 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice/crio-4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f\": RecentStats: unable to find data in memory cache]" Dec 05 05:48:25 crc kubenswrapper[4652]: I1205 05:48:25.814246 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" event={"ID":"d5d1eb7f-02cc-42c2-a956-61c2883ae88c","Type":"ContainerStarted","Data":"afc422c4cf4496d0ff06821324c9d770b38fc4df41eb5c0e527da5a70bc29f13"} Dec 05 05:48:28 crc kubenswrapper[4652]: I1205 05:48:28.808409 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 05:48:28 crc kubenswrapper[4652]: I1205 05:48:28.810716 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 05:48:35 crc kubenswrapper[4652]: E1205 05:48:35.845478 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice/crio-4bfa9537eb3f3589e7f83d1094879493116ede6005e1384109c4779f6b4fe77f\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aa077b9_9612_44cf_b163_d0c1f9468787.slice\": RecentStats: unable to find data in memory cache]" Dec 05 05:48:37 crc kubenswrapper[4652]: I1205 05:48:37.914225 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" event={"ID":"d5d1eb7f-02cc-42c2-a956-61c2883ae88c","Type":"ContainerStarted","Data":"e73655a116c1140f5a0d4db3af26f8ac91bfc0d93297aa5f2e5b80de712e53ba"} Dec 05 05:48:37 crc kubenswrapper[4652]: I1205 05:48:37.933789 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" podStartSLOduration=1.789538842 podStartE2EDuration="13.933772421s" podCreationTimestamp="2025-12-05 05:48:24 +0000 UTC" firstStartedPulling="2025-12-05 05:48:25.360202884 +0000 UTC m=+1307.596933151" lastFinishedPulling="2025-12-05 05:48:37.504436463 +0000 UTC m=+1319.741166730" observedRunningTime="2025-12-05 05:48:37.925615864 +0000 UTC m=+1320.162346131" watchObservedRunningTime="2025-12-05 05:48:37.933772421 +0000 UTC m=+1320.170502677" Dec 05 05:48:48 crc kubenswrapper[4652]: I1205 05:48:48.995137 4652 generic.go:334] "Generic (PLEG): container finished" podID="d5d1eb7f-02cc-42c2-a956-61c2883ae88c" containerID="e73655a116c1140f5a0d4db3af26f8ac91bfc0d93297aa5f2e5b80de712e53ba" exitCode=0 Dec 05 05:48:48 crc kubenswrapper[4652]: I1205 05:48:48.995232 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" 
event={"ID":"d5d1eb7f-02cc-42c2-a956-61c2883ae88c","Type":"ContainerDied","Data":"e73655a116c1140f5a0d4db3af26f8ac91bfc0d93297aa5f2e5b80de712e53ba"} Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.361629 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.383587 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dkkp\" (UniqueName: \"kubernetes.io/projected/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-kube-api-access-7dkkp\") pod \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.383688 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-inventory\") pod \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.383787 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-repo-setup-combined-ca-bundle\") pod \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.383897 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-ssh-key\") pod \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\" (UID: \"d5d1eb7f-02cc-42c2-a956-61c2883ae88c\") " Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.388679 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-kube-api-access-7dkkp" (OuterVolumeSpecName: "kube-api-access-7dkkp") pod "d5d1eb7f-02cc-42c2-a956-61c2883ae88c" (UID: "d5d1eb7f-02cc-42c2-a956-61c2883ae88c"). InnerVolumeSpecName "kube-api-access-7dkkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.389260 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "d5d1eb7f-02cc-42c2-a956-61c2883ae88c" (UID: "d5d1eb7f-02cc-42c2-a956-61c2883ae88c"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.406251 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-inventory" (OuterVolumeSpecName: "inventory") pod "d5d1eb7f-02cc-42c2-a956-61c2883ae88c" (UID: "d5d1eb7f-02cc-42c2-a956-61c2883ae88c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.407376 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d5d1eb7f-02cc-42c2-a956-61c2883ae88c" (UID: "d5d1eb7f-02cc-42c2-a956-61c2883ae88c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.485772 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.485797 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dkkp\" (UniqueName: \"kubernetes.io/projected/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-kube-api-access-7dkkp\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.485810 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:50 crc kubenswrapper[4652]: I1205 05:48:50.485818 4652 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5d1eb7f-02cc-42c2-a956-61c2883ae88c-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.011199 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" event={"ID":"d5d1eb7f-02cc-42c2-a956-61c2883ae88c","Type":"ContainerDied","Data":"afc422c4cf4496d0ff06821324c9d770b38fc4df41eb5c0e527da5a70bc29f13"} Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.011237 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afc422c4cf4496d0ff06821324c9d770b38fc4df41eb5c0e527da5a70bc29f13" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.011267 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.061359 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx"] Dec 05 05:48:51 crc kubenswrapper[4652]: E1205 05:48:51.061824 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5d1eb7f-02cc-42c2-a956-61c2883ae88c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.061843 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5d1eb7f-02cc-42c2-a956-61c2883ae88c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.062002 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5d1eb7f-02cc-42c2-a956-61c2883ae88c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.062696 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.064026 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.064427 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.064595 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.064837 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.068898 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx"] Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.095670 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js8pn\" (UniqueName: \"kubernetes.io/projected/81e62c63-5cd1-4cac-b717-f37452b33ebe-kube-api-access-js8pn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.095813 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.095909 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.197165 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.197464 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js8pn\" (UniqueName: \"kubernetes.io/projected/81e62c63-5cd1-4cac-b717-f37452b33ebe-kube-api-access-js8pn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.197646 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.201001 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.201022 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.211353 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js8pn\" (UniqueName: \"kubernetes.io/projected/81e62c63-5cd1-4cac-b717-f37452b33ebe-kube-api-access-js8pn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jtzrx\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.375458 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" Dec 05 05:48:51 crc kubenswrapper[4652]: W1205 05:48:51.816386 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81e62c63_5cd1_4cac_b717_f37452b33ebe.slice/crio-c7dc74efb0ebba6d11a61e9004249de0a4cd83953d0334dd20a120e6a098fd63 WatchSource:0}: Error finding container c7dc74efb0ebba6d11a61e9004249de0a4cd83953d0334dd20a120e6a098fd63: Status 404 returned error can't find the container with id c7dc74efb0ebba6d11a61e9004249de0a4cd83953d0334dd20a120e6a098fd63 Dec 05 05:48:51 crc kubenswrapper[4652]: I1205 05:48:51.816910 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx"] Dec 05 05:48:52 crc kubenswrapper[4652]: I1205 05:48:52.019460 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" event={"ID":"81e62c63-5cd1-4cac-b717-f37452b33ebe","Type":"ContainerStarted","Data":"c7dc74efb0ebba6d11a61e9004249de0a4cd83953d0334dd20a120e6a098fd63"} Dec 05 05:48:53 crc kubenswrapper[4652]: I1205 05:48:53.028600 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" event={"ID":"81e62c63-5cd1-4cac-b717-f37452b33ebe","Type":"ContainerStarted","Data":"996a5c36653d6e26fcdb46c1510e3bafd0bc91dcc4bf05a36f3f4bce4fc52842"} Dec 05 05:48:53 crc kubenswrapper[4652]: I1205 05:48:53.044369 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" podStartSLOduration=1.553156264 podStartE2EDuration="2.044355996s" podCreationTimestamp="2025-12-05 05:48:51 +0000 UTC" firstStartedPulling="2025-12-05 05:48:51.819324671 +0000 UTC m=+1334.056054938" lastFinishedPulling="2025-12-05 05:48:52.310524403 +0000 UTC m=+1334.547254670" observedRunningTime="2025-12-05 05:48:53.040357167 +0000 UTC m=+1335.277087434" watchObservedRunningTime="2025-12-05 05:48:53.044355996 +0000 UTC 
Dec 05 05:48:55 crc kubenswrapper[4652]: I1205 05:48:55.044670 4652 generic.go:334] "Generic (PLEG): container finished" podID="81e62c63-5cd1-4cac-b717-f37452b33ebe" containerID="996a5c36653d6e26fcdb46c1510e3bafd0bc91dcc4bf05a36f3f4bce4fc52842" exitCode=0
Dec 05 05:48:55 crc kubenswrapper[4652]: I1205 05:48:55.044756 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" event={"ID":"81e62c63-5cd1-4cac-b717-f37452b33ebe","Type":"ContainerDied","Data":"996a5c36653d6e26fcdb46c1510e3bafd0bc91dcc4bf05a36f3f4bce4fc52842"}
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.346541 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx"
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.484450 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-inventory\") pod \"81e62c63-5cd1-4cac-b717-f37452b33ebe\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") "
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.484751 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-ssh-key\") pod \"81e62c63-5cd1-4cac-b717-f37452b33ebe\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") "
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.484827 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js8pn\" (UniqueName: \"kubernetes.io/projected/81e62c63-5cd1-4cac-b717-f37452b33ebe-kube-api-access-js8pn\") pod \"81e62c63-5cd1-4cac-b717-f37452b33ebe\" (UID: \"81e62c63-5cd1-4cac-b717-f37452b33ebe\") "
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.490215 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81e62c63-5cd1-4cac-b717-f37452b33ebe-kube-api-access-js8pn" (OuterVolumeSpecName: "kube-api-access-js8pn") pod "81e62c63-5cd1-4cac-b717-f37452b33ebe" (UID: "81e62c63-5cd1-4cac-b717-f37452b33ebe"). InnerVolumeSpecName "kube-api-access-js8pn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.508524 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-inventory" (OuterVolumeSpecName: "inventory") pod "81e62c63-5cd1-4cac-b717-f37452b33ebe" (UID: "81e62c63-5cd1-4cac-b717-f37452b33ebe"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.510805 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "81e62c63-5cd1-4cac-b717-f37452b33ebe" (UID: "81e62c63-5cd1-4cac-b717-f37452b33ebe"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.586999 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js8pn\" (UniqueName: \"kubernetes.io/projected/81e62c63-5cd1-4cac-b717-f37452b33ebe-kube-api-access-js8pn\") on node \"crc\" DevicePath \"\""
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.587033 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 05:48:56 crc kubenswrapper[4652]: I1205 05:48:56.587043 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81e62c63-5cd1-4cac-b717-f37452b33ebe-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.062680 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx" event={"ID":"81e62c63-5cd1-4cac-b717-f37452b33ebe","Type":"ContainerDied","Data":"c7dc74efb0ebba6d11a61e9004249de0a4cd83953d0334dd20a120e6a098fd63"}
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.062718 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7dc74efb0ebba6d11a61e9004249de0a4cd83953d0334dd20a120e6a098fd63"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.062740 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jtzrx"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.118279 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"]
Dec 05 05:48:57 crc kubenswrapper[4652]: E1205 05:48:57.119107 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81e62c63-5cd1-4cac-b717-f37452b33ebe" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.119126 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="81e62c63-5cd1-4cac-b717-f37452b33ebe" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.119325 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="81e62c63-5cd1-4cac-b717-f37452b33ebe" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.119995 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.122004 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.122155 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.123226 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.124648 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.126050 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"]
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.195901 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.196011 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.196071 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hfrk\" (UniqueName: \"kubernetes.io/projected/627ed016-e8a7-41f8-b474-60cf9b24e5ba-kube-api-access-4hfrk\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.196108 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.298345 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.298479 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hfrk\" (UniqueName: \"kubernetes.io/projected/627ed016-e8a7-41f8-b474-60cf9b24e5ba-kube-api-access-4hfrk\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.298541 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.298702 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.302377 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.302972 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.303001 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.312826 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hfrk\" (UniqueName: \"kubernetes.io/projected/627ed016-e8a7-41f8-b474-60cf9b24e5ba-kube-api-access-4hfrk\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"
Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.434824 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs" Dec 05 05:48:57 crc kubenswrapper[4652]: I1205 05:48:57.878447 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs"] Dec 05 05:48:58 crc kubenswrapper[4652]: I1205 05:48:58.070298 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs" event={"ID":"627ed016-e8a7-41f8-b474-60cf9b24e5ba","Type":"ContainerStarted","Data":"3545d2c4d7bed0ab48e513e7cc7fe68c326d22fcada25c804b1ea30be3ec2c32"} Dec 05 05:48:59 crc kubenswrapper[4652]: I1205 05:48:59.078028 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs" event={"ID":"627ed016-e8a7-41f8-b474-60cf9b24e5ba","Type":"ContainerStarted","Data":"ffdf7565e98469328cfa404710102a16ec1164526ddf0f0d6251ddc07fc95694"} Dec 05 05:48:59 crc kubenswrapper[4652]: I1205 05:48:59.095789 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs" podStartSLOduration=1.5832182700000001 podStartE2EDuration="2.095771855s" podCreationTimestamp="2025-12-05 05:48:57 +0000 UTC" firstStartedPulling="2025-12-05 05:48:57.883159452 +0000 UTC m=+1340.119889719" lastFinishedPulling="2025-12-05 05:48:58.395713037 +0000 UTC m=+1340.632443304" observedRunningTime="2025-12-05 05:48:59.087832928 +0000 UTC m=+1341.324563194" watchObservedRunningTime="2025-12-05 05:48:59.095771855 +0000 UTC m=+1341.332502122" Dec 05 05:49:39 crc kubenswrapper[4652]: I1205 05:49:39.515720 4652 scope.go:117] "RemoveContainer" containerID="37af19b6cf54b6cc05cc4edf0f26ba7b4477f1ca076a54faac4627da1be7a68b" Dec 05 05:50:04 crc kubenswrapper[4652]: I1205 05:50:04.150808 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:50:04 crc kubenswrapper[4652]: I1205 05:50:04.151161 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:50:34 crc kubenswrapper[4652]: I1205 05:50:34.150949 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:50:34 crc kubenswrapper[4652]: I1205 05:50:34.151527 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:50:39 crc kubenswrapper[4652]: I1205 05:50:39.563458 4652 scope.go:117] "RemoveContainer" containerID="4b405aa28f93563e533b354028f7c907c7631e66ce2ac3589c09dc152d3b6357" Dec 05 05:50:39 crc kubenswrapper[4652]: I1205 05:50:39.603600 4652 scope.go:117] 
"RemoveContainer" containerID="be42c2b587bcb6fc43f29d4a1645290a25b8152eb4eb887eb9bb13d9e31a7e42" Dec 05 05:50:52 crc kubenswrapper[4652]: I1205 05:50:52.334396 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-65dffd4ccf-cqtxw" podUID="92728b29-e7ee-4aa6-b072-10c3abc0e22a" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 05 05:51:04 crc kubenswrapper[4652]: I1205 05:51:04.149945 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:51:04 crc kubenswrapper[4652]: I1205 05:51:04.150305 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:51:04 crc kubenswrapper[4652]: I1205 05:51:04.150342 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:51:04 crc kubenswrapper[4652]: I1205 05:51:04.150768 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 05:51:04 crc kubenswrapper[4652]: I1205 05:51:04.150816 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" gracePeriod=600 Dec 05 05:51:04 crc kubenswrapper[4652]: E1205 05:51:04.265435 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:51:05 crc kubenswrapper[4652]: I1205 05:51:05.036571 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" exitCode=0 Dec 05 05:51:05 crc kubenswrapper[4652]: I1205 05:51:05.036587 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876"} Dec 05 05:51:05 crc kubenswrapper[4652]: I1205 05:51:05.036641 4652 scope.go:117] "RemoveContainer" containerID="3f2f5ede1a2a06f286baf6cd6bd2d3f9a5125ae0f0e1be6280b0ee6eb87a5ea7" Dec 05 05:51:05 crc kubenswrapper[4652]: I1205 05:51:05.037000 4652 scope.go:117] "RemoveContainer" 
containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:51:05 crc kubenswrapper[4652]: E1205 05:51:05.037282 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:51:19 crc kubenswrapper[4652]: I1205 05:51:19.125946 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:51:19 crc kubenswrapper[4652]: E1205 05:51:19.126523 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:51:30 crc kubenswrapper[4652]: I1205 05:51:30.125782 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:51:30 crc kubenswrapper[4652]: E1205 05:51:30.126335 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:51:45 crc kubenswrapper[4652]: I1205 05:51:45.126153 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:51:45 crc kubenswrapper[4652]: E1205 05:51:45.126962 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:52:00 crc kubenswrapper[4652]: I1205 05:52:00.125391 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:52:00 crc kubenswrapper[4652]: E1205 05:52:00.125988 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:52:13 crc kubenswrapper[4652]: I1205 05:52:13.125149 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:52:13 crc kubenswrapper[4652]: E1205 05:52:13.125939 4652 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:52:14 crc kubenswrapper[4652]: I1205 05:52:14.530503 4652 generic.go:334] "Generic (PLEG): container finished" podID="627ed016-e8a7-41f8-b474-60cf9b24e5ba" containerID="ffdf7565e98469328cfa404710102a16ec1164526ddf0f0d6251ddc07fc95694" exitCode=0 Dec 05 05:52:14 crc kubenswrapper[4652]: I1205 05:52:14.530594 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs" event={"ID":"627ed016-e8a7-41f8-b474-60cf9b24e5ba","Type":"ContainerDied","Data":"ffdf7565e98469328cfa404710102a16ec1164526ddf0f0d6251ddc07fc95694"} Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.866655 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs" Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.932892 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-ssh-key\") pod \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.932972 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-inventory\") pod \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.932992 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-bootstrap-combined-ca-bundle\") pod \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.933028 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hfrk\" (UniqueName: \"kubernetes.io/projected/627ed016-e8a7-41f8-b474-60cf9b24e5ba-kube-api-access-4hfrk\") pod \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\" (UID: \"627ed016-e8a7-41f8-b474-60cf9b24e5ba\") " Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.937521 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/627ed016-e8a7-41f8-b474-60cf9b24e5ba-kube-api-access-4hfrk" (OuterVolumeSpecName: "kube-api-access-4hfrk") pod "627ed016-e8a7-41f8-b474-60cf9b24e5ba" (UID: "627ed016-e8a7-41f8-b474-60cf9b24e5ba"). InnerVolumeSpecName "kube-api-access-4hfrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.937703 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "627ed016-e8a7-41f8-b474-60cf9b24e5ba" (UID: "627ed016-e8a7-41f8-b474-60cf9b24e5ba"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.954805 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-inventory" (OuterVolumeSpecName: "inventory") pod "627ed016-e8a7-41f8-b474-60cf9b24e5ba" (UID: "627ed016-e8a7-41f8-b474-60cf9b24e5ba"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:15 crc kubenswrapper[4652]: I1205 05:52:15.955813 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "627ed016-e8a7-41f8-b474-60cf9b24e5ba" (UID: "627ed016-e8a7-41f8-b474-60cf9b24e5ba"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.034677 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.034703 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.034713 4652 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627ed016-e8a7-41f8-b474-60cf9b24e5ba-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.034723 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hfrk\" (UniqueName: \"kubernetes.io/projected/627ed016-e8a7-41f8-b474-60cf9b24e5ba-kube-api-access-4hfrk\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.545268 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs" event={"ID":"627ed016-e8a7-41f8-b474-60cf9b24e5ba","Type":"ContainerDied","Data":"3545d2c4d7bed0ab48e513e7cc7fe68c326d22fcada25c804b1ea30be3ec2c32"} Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.545469 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3545d2c4d7bed0ab48e513e7cc7fe68c326d22fcada25c804b1ea30be3ec2c32" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.545316 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.607026 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9"] Dec 05 05:52:16 crc kubenswrapper[4652]: E1205 05:52:16.607407 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="627ed016-e8a7-41f8-b474-60cf9b24e5ba" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.607426 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="627ed016-e8a7-41f8-b474-60cf9b24e5ba" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.607669 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="627ed016-e8a7-41f8-b474-60cf9b24e5ba" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.608305 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.614077 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.615089 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.615131 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.615246 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.617585 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9"] Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.644878 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6b9d\" (UniqueName: \"kubernetes.io/projected/91378bb9-4654-44bb-9162-220068b36036-kube-api-access-q6b9d\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.644968 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.644998 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.746240 4652 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6b9d\" (UniqueName: \"kubernetes.io/projected/91378bb9-4654-44bb-9162-220068b36036-kube-api-access-q6b9d\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.746308 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.746332 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.749494 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.750161 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.759619 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6b9d\" (UniqueName: \"kubernetes.io/projected/91378bb9-4654-44bb-9162-220068b36036-kube-api-access-q6b9d\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:16 crc kubenswrapper[4652]: I1205 05:52:16.922650 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:52:17 crc kubenswrapper[4652]: I1205 05:52:17.366337 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9"] Dec 05 05:52:17 crc kubenswrapper[4652]: I1205 05:52:17.370982 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 05:52:17 crc kubenswrapper[4652]: I1205 05:52:17.553021 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" event={"ID":"91378bb9-4654-44bb-9162-220068b36036","Type":"ContainerStarted","Data":"00375153764a5bcb51cb18e8b7d4bbb67410ff4af6c48b604bd19e60c171fd4d"} Dec 05 05:52:18 crc kubenswrapper[4652]: I1205 05:52:18.561976 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" event={"ID":"91378bb9-4654-44bb-9162-220068b36036","Type":"ContainerStarted","Data":"87ea10d9ddd43e559addad712afec3c7c69eea0efa50d0878abdc5b8a3452534"} Dec 05 05:52:18 crc kubenswrapper[4652]: I1205 05:52:18.579239 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" podStartSLOduration=1.9682516890000001 podStartE2EDuration="2.579226032s" podCreationTimestamp="2025-12-05 05:52:16 +0000 UTC" firstStartedPulling="2025-12-05 05:52:17.370759807 +0000 UTC m=+1539.607490075" lastFinishedPulling="2025-12-05 05:52:17.981734151 +0000 UTC m=+1540.218464418" observedRunningTime="2025-12-05 05:52:18.575631622 +0000 UTC m=+1540.812361889" watchObservedRunningTime="2025-12-05 05:52:18.579226032 +0000 UTC m=+1540.815956298" Dec 05 05:52:26 crc kubenswrapper[4652]: I1205 05:52:26.126439 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:52:26 crc kubenswrapper[4652]: E1205 05:52:26.127274 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:52:39 crc kubenswrapper[4652]: I1205 05:52:39.675232 4652 scope.go:117] "RemoveContainer" containerID="a4477c831f463d3d879e9f718ce33ab5a312877b9173b72af7838aee926dd637" Dec 05 05:52:39 crc kubenswrapper[4652]: I1205 05:52:39.694633 4652 scope.go:117] "RemoveContainer" containerID="458fdd5791d5738328580b67ef5993b1b7451eec2ac452b0d6c30375d9840177" Dec 05 05:52:39 crc kubenswrapper[4652]: I1205 05:52:39.715237 4652 scope.go:117] "RemoveContainer" containerID="65bcbe4b093392356997e024d7c837faccfc88c829ef905a59a8865820a2588b" Dec 05 05:52:39 crc kubenswrapper[4652]: E1205 05:52:39.715340 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"458fdd5791d5738328580b67ef5993b1b7451eec2ac452b0d6c30375d9840177\": container with ID starting with 458fdd5791d5738328580b67ef5993b1b7451eec2ac452b0d6c30375d9840177 not found: ID does not exist" containerID="458fdd5791d5738328580b67ef5993b1b7451eec2ac452b0d6c30375d9840177" Dec 05 05:52:39 crc kubenswrapper[4652]: I1205 05:52:39.732022 4652 
scope.go:117] "RemoveContainer" containerID="1ae366a75281b020232cf9c323b30d4ae8bf6d5ef8ee1f4b804863da2f55d2a9" Dec 05 05:52:39 crc kubenswrapper[4652]: I1205 05:52:39.749608 4652 scope.go:117] "RemoveContainer" containerID="373d6f73b6f40a3fa2966065604f864209bb95e32806141a4f866f057cb5eaf9" Dec 05 05:52:41 crc kubenswrapper[4652]: I1205 05:52:41.125785 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:52:41 crc kubenswrapper[4652]: E1205 05:52:41.126076 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:52:54 crc kubenswrapper[4652]: I1205 05:52:54.127169 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:52:54 crc kubenswrapper[4652]: E1205 05:52:54.128346 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:53:07 crc kubenswrapper[4652]: I1205 05:53:07.126103 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:53:07 crc kubenswrapper[4652]: E1205 05:53:07.126707 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.042931 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-d422-account-create-update-kkjwm"] Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.052232 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-t9r7h"] Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.062468 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-b27e-account-create-update-tntgc"] Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.069068 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-d422-account-create-update-kkjwm"] Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.074886 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-t9r7h"] Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.080362 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-b27e-account-create-update-tntgc"] Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.086168 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-pssmm"] Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 
05:53:14.091824 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-pssmm"] Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.134254 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="380194ba-35a8-4f22-ae98-fd2745e61bff" path="/var/lib/kubelet/pods/380194ba-35a8-4f22-ae98-fd2745e61bff/volumes" Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.134807 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3abdd17b-b27d-4404-b8e4-845c4c04152a" path="/var/lib/kubelet/pods/3abdd17b-b27d-4404-b8e4-845c4c04152a/volumes" Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.135302 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7721fbf3-a28b-4c0c-b581-54946dda1b02" path="/var/lib/kubelet/pods/7721fbf3-a28b-4c0c-b581-54946dda1b02/volumes" Dec 05 05:53:14 crc kubenswrapper[4652]: I1205 05:53:14.135804 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f6ae44c-0c8e-43db-a1a0-77ed8ae83520" path="/var/lib/kubelet/pods/7f6ae44c-0c8e-43db-a1a0-77ed8ae83520/volumes" Dec 05 05:53:16 crc kubenswrapper[4652]: I1205 05:53:16.021661 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-rjlwl"] Dec 05 05:53:16 crc kubenswrapper[4652]: I1205 05:53:16.027679 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-rjlwl"] Dec 05 05:53:16 crc kubenswrapper[4652]: I1205 05:53:16.134465 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f95d2d0b-364e-4bd4-88bd-ffe18c9f8102" path="/var/lib/kubelet/pods/f95d2d0b-364e-4bd4-88bd-ffe18c9f8102/volumes" Dec 05 05:53:17 crc kubenswrapper[4652]: I1205 05:53:17.022332 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-9607-account-create-update-p8nxg"] Dec 05 05:53:17 crc kubenswrapper[4652]: I1205 05:53:17.030147 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-9607-account-create-update-p8nxg"] Dec 05 05:53:18 crc kubenswrapper[4652]: I1205 05:53:18.135136 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c3f37ef-831b-4a8c-a453-7fc85aaa37e2" path="/var/lib/kubelet/pods/9c3f37ef-831b-4a8c-a453-7fc85aaa37e2/volumes" Dec 05 05:53:22 crc kubenswrapper[4652]: I1205 05:53:22.125504 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:53:22 crc kubenswrapper[4652]: E1205 05:53:22.125935 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:53:33 crc kubenswrapper[4652]: I1205 05:53:33.125909 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:53:33 crc kubenswrapper[4652]: E1205 05:53:33.126437 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:53:39 crc kubenswrapper[4652]: I1205 05:53:39.815841 4652 scope.go:117] "RemoveContainer" containerID="2a1bb7548b46d41253554853e5b5b68e1a82f21524a281e033123cd218d00e2c" Dec 05 05:53:39 crc kubenswrapper[4652]: I1205 05:53:39.835516 4652 scope.go:117] "RemoveContainer" containerID="b1c16519846df87cee6af3758d3a3b6b917bdb58f45ab7202013b40ac0e2176e" Dec 05 05:53:39 crc kubenswrapper[4652]: I1205 05:53:39.869860 4652 scope.go:117] "RemoveContainer" containerID="d4b2f7541bff3469e82bc2a38a7c194eba0aa8f0c5c38077e38245a48951957e" Dec 05 05:53:39 crc kubenswrapper[4652]: I1205 05:53:39.919997 4652 scope.go:117] "RemoveContainer" containerID="4c16140ac10c08488733b1e2c7cd4077a2b87972e291e28e41a347b35106d26b" Dec 05 05:53:39 crc kubenswrapper[4652]: I1205 05:53:39.954008 4652 scope.go:117] "RemoveContainer" containerID="b540e2589aa35a038ade87aaa4320725fa48cb7b7e97029a22128dff624b1594" Dec 05 05:53:39 crc kubenswrapper[4652]: I1205 05:53:39.985610 4652 scope.go:117] "RemoveContainer" containerID="83a16c7851d170a2f679ecbd5b35876ef7f68d20f0b69cea038b41eadf2908b4" Dec 05 05:53:45 crc kubenswrapper[4652]: I1205 05:53:45.893354 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bwv4d"] Dec 05 05:53:45 crc kubenswrapper[4652]: I1205 05:53:45.895602 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:45 crc kubenswrapper[4652]: I1205 05:53:45.902985 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bwv4d"] Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.097739 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-utilities\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.097863 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9c9n\" (UniqueName: \"kubernetes.io/projected/09d8394f-b81b-41b1-8ea9-e8661fa604d2-kube-api-access-j9c9n\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.097892 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-catalog-content\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.125808 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:53:46 crc kubenswrapper[4652]: E1205 05:53:46.126050 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.201312 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-utilities\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.201621 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9c9n\" (UniqueName: \"kubernetes.io/projected/09d8394f-b81b-41b1-8ea9-e8661fa604d2-kube-api-access-j9c9n\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.201666 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-catalog-content\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.202176 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-utilities\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.202202 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-catalog-content\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.217054 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9c9n\" (UniqueName: \"kubernetes.io/projected/09d8394f-b81b-41b1-8ea9-e8661fa604d2-kube-api-access-j9c9n\") pod \"redhat-operators-bwv4d\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.217896 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:46 crc kubenswrapper[4652]: I1205 05:53:46.639254 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bwv4d"] Dec 05 05:53:47 crc kubenswrapper[4652]: I1205 05:53:47.262194 4652 generic.go:334] "Generic (PLEG): container finished" podID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerID="7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648" exitCode=0 Dec 05 05:53:47 crc kubenswrapper[4652]: I1205 05:53:47.262245 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwv4d" event={"ID":"09d8394f-b81b-41b1-8ea9-e8661fa604d2","Type":"ContainerDied","Data":"7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648"} Dec 05 05:53:47 crc kubenswrapper[4652]: I1205 05:53:47.262401 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwv4d" event={"ID":"09d8394f-b81b-41b1-8ea9-e8661fa604d2","Type":"ContainerStarted","Data":"77dcafc2e25c4b77efda9f8a0f0874d00481d4f3ce7f248a908f8bee20ca229c"} Dec 05 05:53:48 crc kubenswrapper[4652]: I1205 05:53:48.272299 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwv4d" event={"ID":"09d8394f-b81b-41b1-8ea9-e8661fa604d2","Type":"ContainerStarted","Data":"3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af"} Dec 05 05:53:49 crc kubenswrapper[4652]: I1205 05:53:49.281435 4652 generic.go:334] "Generic (PLEG): container finished" podID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerID="3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af" exitCode=0 Dec 05 05:53:49 crc kubenswrapper[4652]: I1205 05:53:49.281544 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwv4d" event={"ID":"09d8394f-b81b-41b1-8ea9-e8661fa604d2","Type":"ContainerDied","Data":"3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af"} Dec 05 05:53:50 crc kubenswrapper[4652]: I1205 05:53:50.291092 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwv4d" event={"ID":"09d8394f-b81b-41b1-8ea9-e8661fa604d2","Type":"ContainerStarted","Data":"6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e"} Dec 05 05:53:50 crc kubenswrapper[4652]: I1205 05:53:50.303959 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bwv4d" podStartSLOduration=2.807390015 podStartE2EDuration="5.303946353s" podCreationTimestamp="2025-12-05 05:53:45 +0000 UTC" firstStartedPulling="2025-12-05 05:53:47.263712814 +0000 UTC m=+1629.500443081" lastFinishedPulling="2025-12-05 05:53:49.760269152 +0000 UTC m=+1631.996999419" observedRunningTime="2025-12-05 05:53:50.302234224 +0000 UTC m=+1632.538964491" watchObservedRunningTime="2025-12-05 05:53:50.303946353 +0000 UTC m=+1632.540676619" Dec 05 05:53:51 crc kubenswrapper[4652]: I1205 05:53:51.032996 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1b24-account-create-update-9xqkv"] Dec 05 05:53:51 crc kubenswrapper[4652]: I1205 05:53:51.044002 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-6p9w6"] Dec 05 05:53:51 crc kubenswrapper[4652]: I1205 05:53:51.051144 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-6rksq"] Dec 05 05:53:51 crc kubenswrapper[4652]: I1205 05:53:51.057951 4652 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-6p9w6"] Dec 05 05:53:51 crc kubenswrapper[4652]: I1205 05:53:51.063979 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-d6b4-account-create-update-m556k"] Dec 05 05:53:51 crc kubenswrapper[4652]: I1205 05:53:51.069472 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1b24-account-create-update-9xqkv"] Dec 05 05:53:51 crc kubenswrapper[4652]: I1205 05:53:51.075680 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-6rksq"] Dec 05 05:53:51 crc kubenswrapper[4652]: I1205 05:53:51.081160 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-d6b4-account-create-update-m556k"] Dec 05 05:53:52 crc kubenswrapper[4652]: I1205 05:53:52.135294 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="296a1c86-330b-46f9-9ae3-53b42c2e6cb8" path="/var/lib/kubelet/pods/296a1c86-330b-46f9-9ae3-53b42c2e6cb8/volumes" Dec 05 05:53:52 crc kubenswrapper[4652]: I1205 05:53:52.135869 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="875f6fb5-17e4-4ebf-bb40-7cecca7662ae" path="/var/lib/kubelet/pods/875f6fb5-17e4-4ebf-bb40-7cecca7662ae/volumes" Dec 05 05:53:52 crc kubenswrapper[4652]: I1205 05:53:52.136392 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e96d2f64-5044-4f9a-908d-1f31671b7ee5" path="/var/lib/kubelet/pods/e96d2f64-5044-4f9a-908d-1f31671b7ee5/volumes" Dec 05 05:53:52 crc kubenswrapper[4652]: I1205 05:53:52.137252 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f27cfa6f-024e-4274-9788-11b4d959f23b" path="/var/lib/kubelet/pods/f27cfa6f-024e-4274-9788-11b4d959f23b/volumes" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.280391 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bd6fv"] Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.283332 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.289212 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bd6fv"] Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.320270 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kckfq\" (UniqueName: \"kubernetes.io/projected/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-kube-api-access-kckfq\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.320353 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-utilities\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.320405 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-catalog-content\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.422755 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kckfq\" (UniqueName: \"kubernetes.io/projected/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-kube-api-access-kckfq\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.422813 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-utilities\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.422866 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-catalog-content\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.423237 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-utilities\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.423305 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-catalog-content\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.441474 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kckfq\" (UniqueName: \"kubernetes.io/projected/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-kube-api-access-kckfq\") pod \"certified-operators-bd6fv\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:53 crc kubenswrapper[4652]: I1205 05:53:53.600037 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:53:54 crc kubenswrapper[4652]: I1205 05:53:54.028145 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bd6fv"] Dec 05 05:53:54 crc kubenswrapper[4652]: W1205 05:53:54.028720 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d3f5e89_74d5_4186_8b1e_bc33a58c7970.slice/crio-ec024a3ccfa257c7e5f79e27cca7dbad2f8c6cf5708e6b98c6d6da938d8d56ac WatchSource:0}: Error finding container ec024a3ccfa257c7e5f79e27cca7dbad2f8c6cf5708e6b98c6d6da938d8d56ac: Status 404 returned error can't find the container with id ec024a3ccfa257c7e5f79e27cca7dbad2f8c6cf5708e6b98c6d6da938d8d56ac Dec 05 05:53:54 crc kubenswrapper[4652]: I1205 05:53:54.319954 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bd6fv" event={"ID":"3d3f5e89-74d5-4186-8b1e-bc33a58c7970","Type":"ContainerStarted","Data":"1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3"} Dec 05 05:53:54 crc kubenswrapper[4652]: I1205 05:53:54.319992 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bd6fv" event={"ID":"3d3f5e89-74d5-4186-8b1e-bc33a58c7970","Type":"ContainerStarted","Data":"ec024a3ccfa257c7e5f79e27cca7dbad2f8c6cf5708e6b98c6d6da938d8d56ac"} Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.026283 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-3c89-account-create-update-rctff"] Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.041666 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-3c89-account-create-update-rctff"] Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.050157 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-g4pq9"] Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.057890 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-0544-account-create-update-6ht5w"] Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.064942 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-7n6qs"] Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.070626 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-g4pq9"] Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.076699 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-0544-account-create-update-6ht5w"] Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.082025 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-7n6qs"] Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.329456 4652 generic.go:334] "Generic (PLEG): container finished" podID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerID="1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3" exitCode=0 Dec 05 05:53:55 crc kubenswrapper[4652]: I1205 05:53:55.329505 4652 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-bd6fv" event={"ID":"3d3f5e89-74d5-4186-8b1e-bc33a58c7970","Type":"ContainerDied","Data":"1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3"} Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.136013 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ce2513b-6436-4167-8796-3769eb3cba5e" path="/var/lib/kubelet/pods/3ce2513b-6436-4167-8796-3769eb3cba5e/volumes" Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.136690 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebe437ed-e42a-41b6-a50c-9678c3807f8c" path="/var/lib/kubelet/pods/ebe437ed-e42a-41b6-a50c-9678c3807f8c/volumes" Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.137201 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdedbbd-400f-4cbe-810b-ee45f74678ce" path="/var/lib/kubelet/pods/efdedbbd-400f-4cbe-810b-ee45f74678ce/volumes" Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.137777 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6002596-a7c4-4efc-b162-4bdcc1cf63b8" path="/var/lib/kubelet/pods/f6002596-a7c4-4efc-b162-4bdcc1cf63b8/volumes" Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.219046 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.219230 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.251167 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.337387 4652 generic.go:334] "Generic (PLEG): container finished" podID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerID="7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549" exitCode=0 Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.337471 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bd6fv" event={"ID":"3d3f5e89-74d5-4186-8b1e-bc33a58c7970","Type":"ContainerDied","Data":"7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549"} Dec 05 05:53:56 crc kubenswrapper[4652]: I1205 05:53:56.370089 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:57 crc kubenswrapper[4652]: I1205 05:53:57.126081 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:53:57 crc kubenswrapper[4652]: E1205 05:53:57.126521 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:53:57 crc kubenswrapper[4652]: I1205 05:53:57.347935 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bd6fv" event={"ID":"3d3f5e89-74d5-4186-8b1e-bc33a58c7970","Type":"ContainerStarted","Data":"5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2"} Dec 05 
05:53:57 crc kubenswrapper[4652]: I1205 05:53:57.368001 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bd6fv" podStartSLOduration=2.872025508 podStartE2EDuration="4.367778414s" podCreationTimestamp="2025-12-05 05:53:53 +0000 UTC" firstStartedPulling="2025-12-05 05:53:55.331443136 +0000 UTC m=+1637.568173404" lastFinishedPulling="2025-12-05 05:53:56.827196043 +0000 UTC m=+1639.063926310" observedRunningTime="2025-12-05 05:53:57.360104754 +0000 UTC m=+1639.596835022" watchObservedRunningTime="2025-12-05 05:53:57.367778414 +0000 UTC m=+1639.604508682" Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.474149 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bwv4d"] Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.474548 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bwv4d" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerName="registry-server" containerID="cri-o://6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e" gracePeriod=2 Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.842360 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.927756 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-utilities\") pod \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.927981 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-catalog-content\") pod \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.928022 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9c9n\" (UniqueName: \"kubernetes.io/projected/09d8394f-b81b-41b1-8ea9-e8661fa604d2-kube-api-access-j9c9n\") pod \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\" (UID: \"09d8394f-b81b-41b1-8ea9-e8661fa604d2\") " Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.928359 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-utilities" (OuterVolumeSpecName: "utilities") pod "09d8394f-b81b-41b1-8ea9-e8661fa604d2" (UID: "09d8394f-b81b-41b1-8ea9-e8661fa604d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.928735 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:53:58 crc kubenswrapper[4652]: I1205 05:53:58.932386 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09d8394f-b81b-41b1-8ea9-e8661fa604d2-kube-api-access-j9c9n" (OuterVolumeSpecName: "kube-api-access-j9c9n") pod "09d8394f-b81b-41b1-8ea9-e8661fa604d2" (UID: "09d8394f-b81b-41b1-8ea9-e8661fa604d2"). InnerVolumeSpecName "kube-api-access-j9c9n". 
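[editor's note] The "Observed pod startup duration" record just above can be checked by hand from the timestamps it embeds. A minimal sketch, assuming (the log does not state the formula) that podStartSLOduration is the end-to-end startup time minus the image-pull window, measured on the monotonic clock readings (the "m=+..." values); under that assumption both logged durations are reproduced exactly:

    package main

    import "fmt"

    // Sanity-check of the pod_startup_latency_tracker record for
    // certified-operators-bd6fv. Assumption, not shown in the log:
    // podStartSLOduration = end-to-end startup time - image pull time.
    func main() {
        created := 0.0              // podCreationTimestamp 05:53:53 (whole second)
        running := 4.367778414      // watchObservedRunningTime 05:53:57.367778414, relative to creation
        firstPull := 1637.568173404 // firstStartedPulling, monotonic reading
        lastPull := 1639.063926310  // lastFinishedPulling, monotonic reading

        e2e := running - created  // 4.367778414s = podStartE2EDuration
        pull := lastPull - firstPull // 1.495752906s spent pulling the image
        fmt.Printf("podStartE2EDuration: %.9fs\n", e2e)
        fmt.Printf("podStartSLOduration: %.9fs\n", e2e-pull) // 2.872025508s, as logged
    }

Both printed values match the record, which suggests the SLO duration deliberately excludes time spent pulling images.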
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.017379 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09d8394f-b81b-41b1-8ea9-e8661fa604d2" (UID: "09d8394f-b81b-41b1-8ea9-e8661fa604d2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.024914 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-qqgfk"] Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.030697 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09d8394f-b81b-41b1-8ea9-e8661fa604d2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.030722 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9c9n\" (UniqueName: \"kubernetes.io/projected/09d8394f-b81b-41b1-8ea9-e8661fa604d2-kube-api-access-j9c9n\") on node \"crc\" DevicePath \"\"" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.033348 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-qqgfk"] Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.370182 4652 generic.go:334] "Generic (PLEG): container finished" podID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerID="6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e" exitCode=0 Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.370223 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwv4d" event={"ID":"09d8394f-b81b-41b1-8ea9-e8661fa604d2","Type":"ContainerDied","Data":"6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e"} Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.370238 4652 util.go:48] "No ready sandbox for pod can be found. 
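[editor's note] The UnmountVolume / TearDown / "Volume detached" records around this point follow a fixed order: the reconciler first notices a mounted volume is no longer desired, then tears the mount down, and only reports it detached after TearDown succeeds. A conceptual sketch of that loop, assuming a simple desired-vs-actual comparison (an illustration of the ordering, not kubelet's actual reconciler code):

    package main

    import "fmt"

    // Conceptual model of the reconcile pass behind the unmount records above:
    // any volume still mounted but absent from the desired state is torn down,
    // and reported detached only after TearDown succeeds.
    func main() {
        desired := map[string]bool{} // pod deleted: nothing is desired any more
        mounted := []string{"utilities", "catalog-content", "kube-api-access-j9c9n"}

        for _, vol := range mounted {
            if desired[vol] {
                continue // still wanted, leave it mounted
            }
            fmt.Printf("UnmountVolume started for volume %q\n", vol)
            // A real TearDown would unmount here; assumed to succeed in this sketch.
            fmt.Printf("UnmountVolume.TearDown succeeded for volume %q\n", vol)
            fmt.Printf("Volume detached for volume %q\n", vol)
        }
    }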
Need to start a new one" pod="openshift-marketplace/redhat-operators-bwv4d" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.370255 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwv4d" event={"ID":"09d8394f-b81b-41b1-8ea9-e8661fa604d2","Type":"ContainerDied","Data":"77dcafc2e25c4b77efda9f8a0f0874d00481d4f3ce7f248a908f8bee20ca229c"} Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.370272 4652 scope.go:117] "RemoveContainer" containerID="6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.392320 4652 scope.go:117] "RemoveContainer" containerID="3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.395567 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bwv4d"] Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.402893 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bwv4d"] Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.420447 4652 scope.go:117] "RemoveContainer" containerID="7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.442598 4652 scope.go:117] "RemoveContainer" containerID="6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e" Dec 05 05:53:59 crc kubenswrapper[4652]: E1205 05:53:59.443018 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e\": container with ID starting with 6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e not found: ID does not exist" containerID="6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.443062 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e"} err="failed to get container status \"6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e\": rpc error: code = NotFound desc = could not find container \"6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e\": container with ID starting with 6c9861ac22687ff95b1dcdacbc49e7a870c4e8df2b4f59e0fff2797d2b6bfd5e not found: ID does not exist" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.443086 4652 scope.go:117] "RemoveContainer" containerID="3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af" Dec 05 05:53:59 crc kubenswrapper[4652]: E1205 05:53:59.443452 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af\": container with ID starting with 3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af not found: ID does not exist" containerID="3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.443501 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af"} err="failed to get container status \"3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af\": rpc error: code = NotFound desc = could not find container 
\"3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af\": container with ID starting with 3065674348b8a90fd2e2af90befb5b1aeab5e066143fdd8b9a60814da1f797af not found: ID does not exist" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.443526 4652 scope.go:117] "RemoveContainer" containerID="7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648" Dec 05 05:53:59 crc kubenswrapper[4652]: E1205 05:53:59.443813 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648\": container with ID starting with 7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648 not found: ID does not exist" containerID="7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648" Dec 05 05:53:59 crc kubenswrapper[4652]: I1205 05:53:59.443842 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648"} err="failed to get container status \"7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648\": rpc error: code = NotFound desc = could not find container \"7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648\": container with ID starting with 7ac67242fe5a3684be457492b35ac17284ffb903c6259fef696c9c8e14269648 not found: ID does not exist" Dec 05 05:54:00 crc kubenswrapper[4652]: I1205 05:54:00.133951 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" path="/var/lib/kubelet/pods/09d8394f-b81b-41b1-8ea9-e8661fa604d2/volumes" Dec 05 05:54:00 crc kubenswrapper[4652]: I1205 05:54:00.134854 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e26f824e-a877-4436-bca3-8ecdb1d1a73c" path="/var/lib/kubelet/pods/e26f824e-a877-4436-bca3-8ecdb1d1a73c/volumes" Dec 05 05:54:03 crc kubenswrapper[4652]: I1205 05:54:03.600273 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:54:03 crc kubenswrapper[4652]: I1205 05:54:03.600680 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:54:03 crc kubenswrapper[4652]: I1205 05:54:03.632593 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:54:04 crc kubenswrapper[4652]: I1205 05:54:04.029108 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-kt5bg"] Dec 05 05:54:04 crc kubenswrapper[4652]: I1205 05:54:04.036381 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-kt5bg"] Dec 05 05:54:04 crc kubenswrapper[4652]: I1205 05:54:04.135852 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dac210ca-32b5-43af-b85e-4eb7ae57e9d5" path="/var/lib/kubelet/pods/dac210ca-32b5-43af-b85e-4eb7ae57e9d5/volumes" Dec 05 05:54:04 crc kubenswrapper[4652]: I1205 05:54:04.442533 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:54:04 crc kubenswrapper[4652]: I1205 05:54:04.475665 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bd6fv"] Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.426855 4652 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-marketplace/certified-operators-bd6fv" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerName="registry-server" containerID="cri-o://5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2" gracePeriod=2 Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.775679 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.855606 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-utilities\") pod \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.855727 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kckfq\" (UniqueName: \"kubernetes.io/projected/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-kube-api-access-kckfq\") pod \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.855861 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-catalog-content\") pod \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\" (UID: \"3d3f5e89-74d5-4186-8b1e-bc33a58c7970\") " Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.856238 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-utilities" (OuterVolumeSpecName: "utilities") pod "3d3f5e89-74d5-4186-8b1e-bc33a58c7970" (UID: "3d3f5e89-74d5-4186-8b1e-bc33a58c7970"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.860091 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-kube-api-access-kckfq" (OuterVolumeSpecName: "kube-api-access-kckfq") pod "3d3f5e89-74d5-4186-8b1e-bc33a58c7970" (UID: "3d3f5e89-74d5-4186-8b1e-bc33a58c7970"). InnerVolumeSpecName "kube-api-access-kckfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.886663 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d3f5e89-74d5-4186-8b1e-bc33a58c7970" (UID: "3d3f5e89-74d5-4186-8b1e-bc33a58c7970"). InnerVolumeSpecName "catalog-content". 
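[editor's note] The "ContainerStatus from runtime service failed ... NotFound" errors a few records above (and again below for the certified-operators containers) are the benign side of idempotent deletion: cleanup retries RemoveContainer for IDs the runtime has already pruned, and NotFound simply means there is nothing left to do. A sketch of that pattern with stand-in types; the error sentinel and function signatures here are illustrative, not the real CRI client API:

    package main

    import (
        "errors"
        "fmt"
    )

    // errNotFound stands in for a CRI "rpc error: code = NotFound" response.
    var errNotFound = errors.New("rpc error: code = NotFound")

    // removeContainer treats "already gone" as success, so repeated cleanup
    // passes over the same container ID (as in the log above) are harmless.
    func removeContainer(id string, remove func(string) error) error {
        if err := remove(id); err != nil {
            if errors.Is(err, errNotFound) {
                fmt.Printf("container %s already removed, nothing to do\n", id)
                return nil
            }
            return err // a real failure, surface it
        }
        return nil
    }

    func main() {
        gone := func(string) error { return errNotFound } // runtime pruned it already
        _ = removeContainer("6c9861ac2268", gone)
    }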
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.957180 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.957351 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:06 crc kubenswrapper[4652]: I1205 05:54:06.957361 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kckfq\" (UniqueName: \"kubernetes.io/projected/3d3f5e89-74d5-4186-8b1e-bc33a58c7970-kube-api-access-kckfq\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.436159 4652 generic.go:334] "Generic (PLEG): container finished" podID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerID="5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2" exitCode=0 Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.436207 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bd6fv" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.437029 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bd6fv" event={"ID":"3d3f5e89-74d5-4186-8b1e-bc33a58c7970","Type":"ContainerDied","Data":"5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2"} Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.437112 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bd6fv" event={"ID":"3d3f5e89-74d5-4186-8b1e-bc33a58c7970","Type":"ContainerDied","Data":"ec024a3ccfa257c7e5f79e27cca7dbad2f8c6cf5708e6b98c6d6da938d8d56ac"} Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.437134 4652 scope.go:117] "RemoveContainer" containerID="5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.454380 4652 scope.go:117] "RemoveContainer" containerID="7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.459463 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bd6fv"] Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.466291 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bd6fv"] Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.472229 4652 scope.go:117] "RemoveContainer" containerID="1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.504094 4652 scope.go:117] "RemoveContainer" containerID="5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2" Dec 05 05:54:07 crc kubenswrapper[4652]: E1205 05:54:07.504582 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2\": container with ID starting with 5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2 not found: ID does not exist" containerID="5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.504615 
4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2"} err="failed to get container status \"5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2\": rpc error: code = NotFound desc = could not find container \"5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2\": container with ID starting with 5f805244188edd818d3bb662c365d18f3a2dcf36866f1e43138dcf983be775f2 not found: ID does not exist" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.504652 4652 scope.go:117] "RemoveContainer" containerID="7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549" Dec 05 05:54:07 crc kubenswrapper[4652]: E1205 05:54:07.504946 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549\": container with ID starting with 7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549 not found: ID does not exist" containerID="7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.504982 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549"} err="failed to get container status \"7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549\": rpc error: code = NotFound desc = could not find container \"7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549\": container with ID starting with 7955fae0e549d05bf96425f3bdfab4d87fecd9d1498bf33802988d328ce37549 not found: ID does not exist" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.505012 4652 scope.go:117] "RemoveContainer" containerID="1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3" Dec 05 05:54:07 crc kubenswrapper[4652]: E1205 05:54:07.505282 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3\": container with ID starting with 1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3 not found: ID does not exist" containerID="1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3" Dec 05 05:54:07 crc kubenswrapper[4652]: I1205 05:54:07.505307 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3"} err="failed to get container status \"1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3\": rpc error: code = NotFound desc = could not find container \"1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3\": container with ID starting with 1820271d490ea854e2b3ce5eef3c8a295e025b7c3bfb6b3639740447e8be49c3 not found: ID does not exist" Dec 05 05:54:08 crc kubenswrapper[4652]: I1205 05:54:08.135038 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" path="/var/lib/kubelet/pods/3d3f5e89-74d5-4186-8b1e-bc33a58c7970/volumes" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.126059 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:54:09 crc kubenswrapper[4652]: E1205 05:54:09.126377 4652 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.263752 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wjggl"] Dec 05 05:54:09 crc kubenswrapper[4652]: E1205 05:54:09.264112 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerName="registry-server" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.264131 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerName="registry-server" Dec 05 05:54:09 crc kubenswrapper[4652]: E1205 05:54:09.264140 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerName="extract-utilities" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.264146 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerName="extract-utilities" Dec 05 05:54:09 crc kubenswrapper[4652]: E1205 05:54:09.264164 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerName="extract-content" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.264171 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerName="extract-content" Dec 05 05:54:09 crc kubenswrapper[4652]: E1205 05:54:09.264180 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerName="extract-content" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.264185 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerName="extract-content" Dec 05 05:54:09 crc kubenswrapper[4652]: E1205 05:54:09.264205 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerName="extract-utilities" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.264210 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerName="extract-utilities" Dec 05 05:54:09 crc kubenswrapper[4652]: E1205 05:54:09.264222 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerName="registry-server" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.264227 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerName="registry-server" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.264401 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="09d8394f-b81b-41b1-8ea9-e8661fa604d2" containerName="registry-server" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.264422 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d3f5e89-74d5-4186-8b1e-bc33a58c7970" containerName="registry-server" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.265772 4652 util.go:30] "No sandbox for pod can be found. 
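[editor's note] machine-config-daemon-s4t24 keeps hitting the same "back-off 5m0s restarting failed container" error in this window (05:53:57, 05:54:09, 05:54:22, 05:54:33): the container is in CrashLoopBackOff and the kubelet refuses to restart it until the back-off window expires. A sketch of the usual exponential back-off shape; only the 5m cap appears in these messages, so the 10s base and doubling factor are the commonly cited kubelet defaults, assumed rather than read from this log:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Assumed defaults: 10s initial delay, doubling per failed restart,
        // capped at the 5m0s that appears in the log messages.
        delay := 10 * time.Second
        const maxDelay = 5 * time.Minute
        for restart := 1; restart <= 7; restart++ {
            fmt.Printf("restart %d: back-off %v\n", restart, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }

Once the cap is reached, each sync attempt fails immediately with the "Error syncing pod, skipping" message seen here until the back-off expires or the container stays up long enough for the counter to reset.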
Need to start a new one" pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.277031 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wjggl"] Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.399726 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz6tx\" (UniqueName: \"kubernetes.io/projected/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-kube-api-access-bz6tx\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.399897 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-utilities\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.400248 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-catalog-content\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.501777 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-catalog-content\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.501922 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz6tx\" (UniqueName: \"kubernetes.io/projected/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-kube-api-access-bz6tx\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.502001 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-utilities\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.502349 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-catalog-content\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.502356 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-utilities\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.518282 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bz6tx\" (UniqueName: \"kubernetes.io/projected/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-kube-api-access-bz6tx\") pod \"community-operators-wjggl\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.580991 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:09 crc kubenswrapper[4652]: I1205 05:54:09.992284 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wjggl"] Dec 05 05:54:10 crc kubenswrapper[4652]: I1205 05:54:10.458030 4652 generic.go:334] "Generic (PLEG): container finished" podID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerID="502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd" exitCode=0 Dec 05 05:54:10 crc kubenswrapper[4652]: I1205 05:54:10.458069 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjggl" event={"ID":"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6","Type":"ContainerDied","Data":"502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd"} Dec 05 05:54:10 crc kubenswrapper[4652]: I1205 05:54:10.458218 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjggl" event={"ID":"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6","Type":"ContainerStarted","Data":"9da97db4397ac3e0ed8ac5630a2be8ff5a45014975acde5238ce5d2cd176c0fb"} Dec 05 05:54:11 crc kubenswrapper[4652]: I1205 05:54:11.466120 4652 generic.go:334] "Generic (PLEG): container finished" podID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerID="4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0" exitCode=0 Dec 05 05:54:11 crc kubenswrapper[4652]: I1205 05:54:11.466207 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjggl" event={"ID":"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6","Type":"ContainerDied","Data":"4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0"} Dec 05 05:54:12 crc kubenswrapper[4652]: I1205 05:54:12.475300 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjggl" event={"ID":"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6","Type":"ContainerStarted","Data":"c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416"} Dec 05 05:54:12 crc kubenswrapper[4652]: I1205 05:54:12.492609 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wjggl" podStartSLOduration=1.9836520229999999 podStartE2EDuration="3.492595452s" podCreationTimestamp="2025-12-05 05:54:09 +0000 UTC" firstStartedPulling="2025-12-05 05:54:10.459447869 +0000 UTC m=+1652.696178136" lastFinishedPulling="2025-12-05 05:54:11.968391298 +0000 UTC m=+1654.205121565" observedRunningTime="2025-12-05 05:54:12.487393651 +0000 UTC m=+1654.724123918" watchObservedRunningTime="2025-12-05 05:54:12.492595452 +0000 UTC m=+1654.729325720" Dec 05 05:54:19 crc kubenswrapper[4652]: I1205 05:54:19.582231 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:19 crc kubenswrapper[4652]: I1205 05:54:19.582650 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:19 crc kubenswrapper[4652]: I1205 
05:54:19.612412 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:20 crc kubenswrapper[4652]: I1205 05:54:20.559540 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:20 crc kubenswrapper[4652]: I1205 05:54:20.597688 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wjggl"] Dec 05 05:54:21 crc kubenswrapper[4652]: I1205 05:54:21.022473 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-pvs56"] Dec 05 05:54:21 crc kubenswrapper[4652]: I1205 05:54:21.030143 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-pvs56"] Dec 05 05:54:22 crc kubenswrapper[4652]: I1205 05:54:22.125805 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:54:22 crc kubenswrapper[4652]: E1205 05:54:22.126013 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:54:22 crc kubenswrapper[4652]: I1205 05:54:22.135157 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="298ca280-89e1-42fc-8d46-b63f6588896f" path="/var/lib/kubelet/pods/298ca280-89e1-42fc-8d46-b63f6588896f/volumes" Dec 05 05:54:22 crc kubenswrapper[4652]: I1205 05:54:22.543247 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wjggl" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerName="registry-server" containerID="cri-o://c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416" gracePeriod=2 Dec 05 05:54:22 crc kubenswrapper[4652]: I1205 05:54:22.908317 4652 util.go:48] "No ready sandbox for pod can be found. 
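[editor's note] "Killing container with a grace period ... gracePeriod=2" above is the standard two-phase stop: deliver SIGTERM, wait up to the grace period, then escalate to SIGKILL. A self-contained sketch of that escalation against an ordinary process (illustrative only; the real signalling is done by CRI-O on the kubelet's behalf, not by code like this):

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    func main() {
        cmd := exec.Command("sleep", "60") // stands in for the container process
        if err := cmd.Start(); err != nil {
            panic(err)
        }

        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()

        cmd.Process.Signal(syscall.SIGTERM) // polite phase
        select {
        case <-done:
            fmt.Println("exited within the grace period")
        case <-time.After(2 * time.Second): // gracePeriod=2, as in the log
            cmd.Process.Kill() // escalation: SIGKILL
            <-done
            fmt.Println("killed after the grace period expired")
        }
    }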
Need to start a new one" pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.027928 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-catalog-content\") pod \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.027973 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz6tx\" (UniqueName: \"kubernetes.io/projected/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-kube-api-access-bz6tx\") pod \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.028057 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-utilities\") pod \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\" (UID: \"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6\") " Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.028906 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-utilities" (OuterVolumeSpecName: "utilities") pod "95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" (UID: "95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.032526 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-kube-api-access-bz6tx" (OuterVolumeSpecName: "kube-api-access-bz6tx") pod "95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" (UID: "95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6"). InnerVolumeSpecName "kube-api-access-bz6tx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.066302 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" (UID: "95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.129941 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.129967 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz6tx\" (UniqueName: \"kubernetes.io/projected/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-kube-api-access-bz6tx\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.129980 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.552930 4652 generic.go:334] "Generic (PLEG): container finished" podID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerID="c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416" exitCode=0 Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.552988 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjggl" event={"ID":"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6","Type":"ContainerDied","Data":"c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416"} Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.553057 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wjggl" event={"ID":"95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6","Type":"ContainerDied","Data":"9da97db4397ac3e0ed8ac5630a2be8ff5a45014975acde5238ce5d2cd176c0fb"} Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.553078 4652 scope.go:117] "RemoveContainer" containerID="c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.553271 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wjggl" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.569645 4652 scope.go:117] "RemoveContainer" containerID="4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.578367 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wjggl"] Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.584942 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wjggl"] Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.587876 4652 scope.go:117] "RemoveContainer" containerID="502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.623985 4652 scope.go:117] "RemoveContainer" containerID="c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416" Dec 05 05:54:23 crc kubenswrapper[4652]: E1205 05:54:23.624401 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416\": container with ID starting with c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416 not found: ID does not exist" containerID="c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.624433 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416"} err="failed to get container status \"c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416\": rpc error: code = NotFound desc = could not find container \"c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416\": container with ID starting with c2ff0956dc686595c66a5bf7e38f2fe7fa6096973f199609a8f066ae553e8416 not found: ID does not exist" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.624451 4652 scope.go:117] "RemoveContainer" containerID="4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0" Dec 05 05:54:23 crc kubenswrapper[4652]: E1205 05:54:23.624942 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0\": container with ID starting with 4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0 not found: ID does not exist" containerID="4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.624973 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0"} err="failed to get container status \"4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0\": rpc error: code = NotFound desc = could not find container \"4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0\": container with ID starting with 4409867dacf06ab5134c22d7e35aad1a666792c95a3211626d7525ff1eed70f0 not found: ID does not exist" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.624995 4652 scope.go:117] "RemoveContainer" containerID="502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd" Dec 05 05:54:23 crc kubenswrapper[4652]: E1205 05:54:23.625296 4652 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd\": container with ID starting with 502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd not found: ID does not exist" containerID="502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd" Dec 05 05:54:23 crc kubenswrapper[4652]: I1205 05:54:23.625331 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd"} err="failed to get container status \"502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd\": rpc error: code = NotFound desc = could not find container \"502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd\": container with ID starting with 502958c277c4ba47455ac70e9770b18268ef6dc2052c73c87a55e8d63bd965cd not found: ID does not exist" Dec 05 05:54:24 crc kubenswrapper[4652]: I1205 05:54:24.134615 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" path="/var/lib/kubelet/pods/95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6/volumes" Dec 05 05:54:33 crc kubenswrapper[4652]: I1205 05:54:33.126129 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:54:33 crc kubenswrapper[4652]: E1205 05:54:33.126642 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.116362 4652 scope.go:117] "RemoveContainer" containerID="96330fc777e64b0833ca0608897222452c2148036c2941fc7d4d3a5924ccbc59" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.135451 4652 scope.go:117] "RemoveContainer" containerID="cc9549f1668c5d9c63a3650a583b150712103f246970d896899fbb3c64173558" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.178834 4652 scope.go:117] "RemoveContainer" containerID="c0b012eb8c3df1d4d7870965f6902a4bab7e40c41d981de93b70a5d2402a39ad" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.206621 4652 scope.go:117] "RemoveContainer" containerID="5088cc16b763bf7ef0d72a42412d52ca8415fe59b0a48f99f1d403503a2af8aa" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.246508 4652 scope.go:117] "RemoveContainer" containerID="c42c033d68265a364046ab3458f9e00eecd253cd56c7283ab00c497fbea7581d" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.273937 4652 scope.go:117] "RemoveContainer" containerID="691fab56455a90b3527c40ebd18fc5a2cc3dee28cb094719a707fd282eef9ae5" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.311870 4652 scope.go:117] "RemoveContainer" containerID="e3ed6116a8553d565b9fb4a1ae29030bf059c99a97f7bbe6edce868745878467" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.327853 4652 scope.go:117] "RemoveContainer" containerID="bfe0566958b046c824313ca92d235673089c8b3679c8dba3f106865946349d95" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.343640 4652 scope.go:117] "RemoveContainer" containerID="734da4c19af18d37037b644049892190f897bf593ca0e8704a1a412ecba05d67" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.359835 4652 
scope.go:117] "RemoveContainer" containerID="9ce137261b4e7f177aea5210c74fbccafb01bc775eac6725564ca2e7f0580511" Dec 05 05:54:40 crc kubenswrapper[4652]: I1205 05:54:40.374946 4652 scope.go:117] "RemoveContainer" containerID="cb5be2d2c84f876e7ccc1277ecdf62e96bf1d2f00169d5521a41420418deeb17" Dec 05 05:54:41 crc kubenswrapper[4652]: I1205 05:54:41.692387 4652 generic.go:334] "Generic (PLEG): container finished" podID="91378bb9-4654-44bb-9162-220068b36036" containerID="87ea10d9ddd43e559addad712afec3c7c69eea0efa50d0878abdc5b8a3452534" exitCode=0 Dec 05 05:54:41 crc kubenswrapper[4652]: I1205 05:54:41.692473 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" event={"ID":"91378bb9-4654-44bb-9162-220068b36036","Type":"ContainerDied","Data":"87ea10d9ddd43e559addad712afec3c7c69eea0efa50d0878abdc5b8a3452534"} Dec 05 05:54:42 crc kubenswrapper[4652]: I1205 05:54:42.998396 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.033065 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-wqg7s"] Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.041222 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-mlkhj"] Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.047830 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-mlkhj"] Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.053031 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-wqg7s"] Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.152480 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-inventory\") pod \"91378bb9-4654-44bb-9162-220068b36036\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.152601 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6b9d\" (UniqueName: \"kubernetes.io/projected/91378bb9-4654-44bb-9162-220068b36036-kube-api-access-q6b9d\") pod \"91378bb9-4654-44bb-9162-220068b36036\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.152624 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-ssh-key\") pod \"91378bb9-4654-44bb-9162-220068b36036\" (UID: \"91378bb9-4654-44bb-9162-220068b36036\") " Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.156899 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91378bb9-4654-44bb-9162-220068b36036-kube-api-access-q6b9d" (OuterVolumeSpecName: "kube-api-access-q6b9d") pod "91378bb9-4654-44bb-9162-220068b36036" (UID: "91378bb9-4654-44bb-9162-220068b36036"). InnerVolumeSpecName "kube-api-access-q6b9d". 
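[editor's note] The burst of RemoveContainer calls at 05:54:40 looks like the periodic dead-container garbage collector doing a sweep. A sketch of the usual retention rule, assuming the commonly cited default of keeping at most one dead container per (pod, container) pair; the policy itself does not appear in this log, and the pod/ID names below are hypothetical:

    package main

    import "fmt"

    // Dead-container GC sketch: keep the newest maxPerPair dead containers for
    // each (pod, container) pair and remove the rest, mirroring the burst of
    // RemoveContainer records at 05:54:40 above. Assumed policy, not from the log.
    func main() {
        const maxPerPair = 1
        // Hypothetical dead containers per (pod, container) key, newest first.
        dead := map[string][]string{
            "some-pod/some-container": {"id-newest", "id-older", "id-oldest"},
        }
        for key, ids := range dead {
            for i, id := range ids {
                if i < maxPerPair {
                    continue // retained as the most recent dead instance
                }
                fmt.Printf("RemoveContainer %s (pod %s)\n", id, key)
            }
        }
    }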
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.173385 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-inventory" (OuterVolumeSpecName: "inventory") pod "91378bb9-4654-44bb-9162-220068b36036" (UID: "91378bb9-4654-44bb-9162-220068b36036"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.174013 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "91378bb9-4654-44bb-9162-220068b36036" (UID: "91378bb9-4654-44bb-9162-220068b36036"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.255077 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6b9d\" (UniqueName: \"kubernetes.io/projected/91378bb9-4654-44bb-9162-220068b36036-kube-api-access-q6b9d\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.255098 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.255107 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91378bb9-4654-44bb-9162-220068b36036-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.711907 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" event={"ID":"91378bb9-4654-44bb-9162-220068b36036","Type":"ContainerDied","Data":"00375153764a5bcb51cb18e8b7d4bbb67410ff4af6c48b604bd19e60c171fd4d"} Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.712118 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00375153764a5bcb51cb18e8b7d4bbb67410ff4af6c48b604bd19e60c171fd4d" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.711970 4652 util.go:48] "No ready sandbox for pod can be found. 
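[editor's note] When working with a capture like this one, it is often handy to pull the klog header (severity, date, timestamp, PID, source location) out of each journal line before filtering. A small sketch, assuming the "I1205 05:54:43.152480 4652 file.go:159] message" layout seen throughout this log:

    package main

    import (
        "fmt"
        "regexp"
    )

    // Matches the klog header used throughout this log:
    // severity, MMDD, HH:MM:SS.micros, PID, file:line] message
    var klogLine = regexp.MustCompile(`([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w./]+:\d+)\] (.*)`)

    func main() {
        line := `I1205 05:54:43.152480 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\""`
        m := klogLine.FindStringSubmatch(line)
        if m == nil {
            fmt.Println("no klog header found")
            return
        }
        fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s\n", m[1], m[2], m[3], m[4], m[5])
        fmt.Printf("msg=%s\n", m[6])
    }

Filtering on m[5] (the source location) is a quick way to isolate, say, all reconciler_common.go volume traffic from a log this dense.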
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.761662 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q"] Dec 05 05:54:43 crc kubenswrapper[4652]: E1205 05:54:43.762023 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91378bb9-4654-44bb-9162-220068b36036" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.762042 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="91378bb9-4654-44bb-9162-220068b36036" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 05:54:43 crc kubenswrapper[4652]: E1205 05:54:43.762077 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerName="extract-utilities" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.762084 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerName="extract-utilities" Dec 05 05:54:43 crc kubenswrapper[4652]: E1205 05:54:43.762102 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerName="registry-server" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.762109 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerName="registry-server" Dec 05 05:54:43 crc kubenswrapper[4652]: E1205 05:54:43.762125 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerName="extract-content" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.762131 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerName="extract-content" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.762317 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="91378bb9-4654-44bb-9162-220068b36036" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.762357 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="95a99bfd-2c46-4ebd-9e54-a9bc720dc6e6" containerName="registry-server" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.763009 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.764748 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.765133 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.765223 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.769578 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.772270 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q"] Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.865304 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k62pl\" (UniqueName: \"kubernetes.io/projected/411b62a2-a40e-44ab-adff-aa570ec06501-kube-api-access-k62pl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.865342 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.865535 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.967538 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.967891 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k62pl\" (UniqueName: \"kubernetes.io/projected/411b62a2-a40e-44ab-adff-aa570ec06501-kube-api-access-k62pl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.968019 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-ssh-key\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.971761 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.973303 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:43 crc kubenswrapper[4652]: I1205 05:54:43.982382 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k62pl\" (UniqueName: \"kubernetes.io/projected/411b62a2-a40e-44ab-adff-aa570ec06501-kube-api-access-k62pl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pws2q\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:44 crc kubenswrapper[4652]: I1205 05:54:44.025926 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-ghfhc"] Dec 05 05:54:44 crc kubenswrapper[4652]: I1205 05:54:44.035948 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-ghfhc"] Dec 05 05:54:44 crc kubenswrapper[4652]: I1205 05:54:44.081656 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:54:44 crc kubenswrapper[4652]: I1205 05:54:44.138542 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3415917b-2730-494b-b474-b1bd9c1d08f5" path="/var/lib/kubelet/pods/3415917b-2730-494b-b474-b1bd9c1d08f5/volumes" Dec 05 05:54:44 crc kubenswrapper[4652]: I1205 05:54:44.139238 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c70e1c4-c49c-4cb6-adec-0173ebe53d17" path="/var/lib/kubelet/pods/5c70e1c4-c49c-4cb6-adec-0173ebe53d17/volumes" Dec 05 05:54:44 crc kubenswrapper[4652]: I1205 05:54:44.139800 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86c35465-1240-412c-9182-99d8ed10f948" path="/var/lib/kubelet/pods/86c35465-1240-412c-9182-99d8ed10f948/volumes" Dec 05 05:54:44 crc kubenswrapper[4652]: I1205 05:54:44.507205 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q"] Dec 05 05:54:44 crc kubenswrapper[4652]: I1205 05:54:44.720260 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" event={"ID":"411b62a2-a40e-44ab-adff-aa570ec06501","Type":"ContainerStarted","Data":"7b0a10c77d3ea2a53afa1df1b9e0f35b6eea11061493d3a9b79b5760f720e2fc"} Dec 05 05:54:45 crc kubenswrapper[4652]: I1205 05:54:45.729654 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" event={"ID":"411b62a2-a40e-44ab-adff-aa570ec06501","Type":"ContainerStarted","Data":"2ab15dd8e0e5413ab7c5a3011c5698fbe7951bf1dac210893c081aaaa254e40f"} Dec 05 05:54:45 crc kubenswrapper[4652]: I1205 05:54:45.746880 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" podStartSLOduration=2.150949721 podStartE2EDuration="2.746867616s" podCreationTimestamp="2025-12-05 05:54:43 +0000 UTC" firstStartedPulling="2025-12-05 05:54:44.510627828 +0000 UTC m=+1686.747358086" lastFinishedPulling="2025-12-05 05:54:45.106545714 +0000 UTC m=+1687.343275981" observedRunningTime="2025-12-05 05:54:45.739971198 +0000 UTC m=+1687.976701466" watchObservedRunningTime="2025-12-05 05:54:45.746867616 +0000 UTC m=+1687.983597884" Dec 05 05:54:47 crc kubenswrapper[4652]: I1205 05:54:47.125541 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:54:47 crc kubenswrapper[4652]: E1205 05:54:47.126093 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:54:53 crc kubenswrapper[4652]: I1205 05:54:53.020040 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-nmg5q"] Dec 05 05:54:53 crc kubenswrapper[4652]: I1205 05:54:53.026246 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-nmg5q"] Dec 05 05:54:54 crc kubenswrapper[4652]: I1205 05:54:54.133951 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1e961f4-2398-4a5e-a424-e8066a6a7c78" 
path="/var/lib/kubelet/pods/a1e961f4-2398-4a5e-a424-e8066a6a7c78/volumes" Dec 05 05:55:01 crc kubenswrapper[4652]: I1205 05:55:01.029362 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-xcf4m"] Dec 05 05:55:01 crc kubenswrapper[4652]: I1205 05:55:01.051100 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-xcf4m"] Dec 05 05:55:01 crc kubenswrapper[4652]: I1205 05:55:01.126534 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:55:01 crc kubenswrapper[4652]: E1205 05:55:01.126806 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:55:02 crc kubenswrapper[4652]: I1205 05:55:02.133535 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="266e7065-7af6-4547-b7bb-5e981e095969" path="/var/lib/kubelet/pods/266e7065-7af6-4547-b7bb-5e981e095969/volumes" Dec 05 05:55:14 crc kubenswrapper[4652]: I1205 05:55:14.125738 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:55:14 crc kubenswrapper[4652]: E1205 05:55:14.126379 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:55:26 crc kubenswrapper[4652]: I1205 05:55:26.126414 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:55:26 crc kubenswrapper[4652]: E1205 05:55:26.127093 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.022220 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-hg5sm"] Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.027952 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-rjhbt"] Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.035294 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-rjhbt"] Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.043131 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-hg5sm"] Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.049868 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-tj9c4"] Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.056414 4652 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/nova-api-7ac3-account-create-update-mp544"] Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.061883 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-tj9c4"] Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.067250 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-7ac3-account-create-update-mp544"] Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.144431 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eb04f8f-6137-4976-b83c-61694b5e34bb" path="/var/lib/kubelet/pods/6eb04f8f-6137-4976-b83c-61694b5e34bb/volumes" Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.145130 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7620a38c-f020-46b9-b5b7-34e4d3a0f96b" path="/var/lib/kubelet/pods/7620a38c-f020-46b9-b5b7-34e4d3a0f96b/volumes" Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.145798 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99f60875-ac12-428f-9992-29473a3dfb62" path="/var/lib/kubelet/pods/99f60875-ac12-428f-9992-29473a3dfb62/volumes" Dec 05 05:55:28 crc kubenswrapper[4652]: I1205 05:55:28.146436 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d187d4b2-4a72-4501-bdfc-dcf4808060f5" path="/var/lib/kubelet/pods/d187d4b2-4a72-4501-bdfc-dcf4808060f5/volumes" Dec 05 05:55:29 crc kubenswrapper[4652]: I1205 05:55:29.022625 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d224-account-create-update-8fhpr"] Dec 05 05:55:29 crc kubenswrapper[4652]: I1205 05:55:29.032136 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-0b21-account-create-update-8zzvd"] Dec 05 05:55:29 crc kubenswrapper[4652]: I1205 05:55:29.038735 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-d224-account-create-update-8fhpr"] Dec 05 05:55:29 crc kubenswrapper[4652]: I1205 05:55:29.044813 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-0b21-account-create-update-8zzvd"] Dec 05 05:55:30 crc kubenswrapper[4652]: I1205 05:55:30.133625 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="519311f0-3163-4215-a7b8-cf7302c9e8f8" path="/var/lib/kubelet/pods/519311f0-3163-4215-a7b8-cf7302c9e8f8/volumes" Dec 05 05:55:30 crc kubenswrapper[4652]: I1205 05:55:30.134164 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a19b09de-90b9-4718-8f0b-016f84266f36" path="/var/lib/kubelet/pods/a19b09de-90b9-4718-8f0b-016f84266f36/volumes" Dec 05 05:55:37 crc kubenswrapper[4652]: I1205 05:55:37.126361 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:55:37 crc kubenswrapper[4652]: E1205 05:55:37.127007 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.545810 4652 scope.go:117] "RemoveContainer" containerID="d678705d76ec3eb0fc863b1d1c2025e9489f0b7ed99b478050eab495e14781a8" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.575870 
4652 scope.go:117] "RemoveContainer" containerID="1b4b8a26b5fb42022c7f44aee8c181036979d5fd24204b20d1e46afda9363848" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.606449 4652 scope.go:117] "RemoveContainer" containerID="5197c73104627b9852dd8720560d7a36be572929880f88a4c8e9d9cb6d23ee93" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.637147 4652 scope.go:117] "RemoveContainer" containerID="8006bf94bf3ddafe44bd37a89de67ebb272ba4214a0482617f43e4b803ea15a2" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.663036 4652 scope.go:117] "RemoveContainer" containerID="b076ceaec9e7bdc2a7d3bfb34652c27b93881bbef018e710e411b902720ed3ce" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.708673 4652 scope.go:117] "RemoveContainer" containerID="f16e2f4bfc579d4b966a1f43005c195b5a798be9b6f0f93fd05feff323b6a443" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.737733 4652 scope.go:117] "RemoveContainer" containerID="50512b1fb91f5a6515acdf4ab562dc184bf24b33aa90ab89026328fe0db1b95f" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.753313 4652 scope.go:117] "RemoveContainer" containerID="dee17f54db89f2555885f37c6af39842f0a2d842c75518c039d709c20bd3b5a9" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.770651 4652 scope.go:117] "RemoveContainer" containerID="336a971cfb95c60fa6c21060cd5cd46635c92ff06669ea0717d368571f4883de" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.791945 4652 scope.go:117] "RemoveContainer" containerID="5970cba1ccb5e1474abcdb6fe4ea8b97113ff5d8cd118999bc307f0761a8d80f" Dec 05 05:55:40 crc kubenswrapper[4652]: I1205 05:55:40.806752 4652 scope.go:117] "RemoveContainer" containerID="18e73cca34687422e4ace6d76a079a2aa54f555c15fb566b0a6ebc0c74f5bb24" Dec 05 05:55:47 crc kubenswrapper[4652]: I1205 05:55:47.154467 4652 generic.go:334] "Generic (PLEG): container finished" podID="411b62a2-a40e-44ab-adff-aa570ec06501" containerID="2ab15dd8e0e5413ab7c5a3011c5698fbe7951bf1dac210893c081aaaa254e40f" exitCode=0 Dec 05 05:55:47 crc kubenswrapper[4652]: I1205 05:55:47.154565 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" event={"ID":"411b62a2-a40e-44ab-adff-aa570ec06501","Type":"ContainerDied","Data":"2ab15dd8e0e5413ab7c5a3011c5698fbe7951bf1dac210893c081aaaa254e40f"} Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.466897 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.536005 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k62pl\" (UniqueName: \"kubernetes.io/projected/411b62a2-a40e-44ab-adff-aa570ec06501-kube-api-access-k62pl\") pod \"411b62a2-a40e-44ab-adff-aa570ec06501\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.536167 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-inventory\") pod \"411b62a2-a40e-44ab-adff-aa570ec06501\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.536228 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-ssh-key\") pod \"411b62a2-a40e-44ab-adff-aa570ec06501\" (UID: \"411b62a2-a40e-44ab-adff-aa570ec06501\") " Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.541137 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/411b62a2-a40e-44ab-adff-aa570ec06501-kube-api-access-k62pl" (OuterVolumeSpecName: "kube-api-access-k62pl") pod "411b62a2-a40e-44ab-adff-aa570ec06501" (UID: "411b62a2-a40e-44ab-adff-aa570ec06501"). InnerVolumeSpecName "kube-api-access-k62pl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.558725 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-inventory" (OuterVolumeSpecName: "inventory") pod "411b62a2-a40e-44ab-adff-aa570ec06501" (UID: "411b62a2-a40e-44ab-adff-aa570ec06501"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.558949 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "411b62a2-a40e-44ab-adff-aa570ec06501" (UID: "411b62a2-a40e-44ab-adff-aa570ec06501"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.639428 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k62pl\" (UniqueName: \"kubernetes.io/projected/411b62a2-a40e-44ab-adff-aa570ec06501-kube-api-access-k62pl\") on node \"crc\" DevicePath \"\"" Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.639465 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:55:48 crc kubenswrapper[4652]: I1205 05:55:48.639474 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/411b62a2-a40e-44ab-adff-aa570ec06501-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.126326 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:55:49 crc kubenswrapper[4652]: E1205 05:55:49.126541 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.170251 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" event={"ID":"411b62a2-a40e-44ab-adff-aa570ec06501","Type":"ContainerDied","Data":"7b0a10c77d3ea2a53afa1df1b9e0f35b6eea11061493d3a9b79b5760f720e2fc"} Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.170281 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pws2q" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.170290 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b0a10c77d3ea2a53afa1df1b9e0f35b6eea11061493d3a9b79b5760f720e2fc" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.233749 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl"] Dec 05 05:55:49 crc kubenswrapper[4652]: E1205 05:55:49.234231 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="411b62a2-a40e-44ab-adff-aa570ec06501" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.234253 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="411b62a2-a40e-44ab-adff-aa570ec06501" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.234474 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="411b62a2-a40e-44ab-adff-aa570ec06501" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.235162 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.236844 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.237114 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.237252 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.238312 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.245826 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl"] Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.355315 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk6tp\" (UniqueName: \"kubernetes.io/projected/1e9468f7-9fcc-48bd-9745-af6990cb4091-kube-api-access-nk6tp\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.355372 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.355401 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.457767 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk6tp\" (UniqueName: \"kubernetes.io/projected/1e9468f7-9fcc-48bd-9745-af6990cb4091-kube-api-access-nk6tp\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.457819 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.457841 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.463247 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.463759 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.473217 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk6tp\" (UniqueName: \"kubernetes.io/projected/1e9468f7-9fcc-48bd-9745-af6990cb4091-kube-api-access-nk6tp\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-st6kl\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.549583 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:49 crc kubenswrapper[4652]: I1205 05:55:49.976202 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl"] Dec 05 05:55:50 crc kubenswrapper[4652]: I1205 05:55:50.177992 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" event={"ID":"1e9468f7-9fcc-48bd-9745-af6990cb4091","Type":"ContainerStarted","Data":"8f5003b0ebc421db38c07509b57772c8b961c17fe7f1c37bac9290da15f3dac3"} Dec 05 05:55:51 crc kubenswrapper[4652]: I1205 05:55:51.185745 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" event={"ID":"1e9468f7-9fcc-48bd-9745-af6990cb4091","Type":"ContainerStarted","Data":"9f4cfdb3023764b1ab86df347383f8c717af44d80ad0440bceadf40aee9310e7"} Dec 05 05:55:51 crc kubenswrapper[4652]: I1205 05:55:51.207522 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" podStartSLOduration=1.556894658 podStartE2EDuration="2.207496644s" podCreationTimestamp="2025-12-05 05:55:49 +0000 UTC" firstStartedPulling="2025-12-05 05:55:49.981300902 +0000 UTC m=+1752.218031169" lastFinishedPulling="2025-12-05 05:55:50.631902887 +0000 UTC m=+1752.868633155" observedRunningTime="2025-12-05 05:55:51.202550102 +0000 UTC m=+1753.439280369" watchObservedRunningTime="2025-12-05 05:55:51.207496644 +0000 UTC m=+1753.444226910" Dec 05 05:55:55 crc kubenswrapper[4652]: I1205 05:55:55.212874 4652 generic.go:334] "Generic (PLEG): container finished" podID="1e9468f7-9fcc-48bd-9745-af6990cb4091" containerID="9f4cfdb3023764b1ab86df347383f8c717af44d80ad0440bceadf40aee9310e7" exitCode=0 Dec 05 05:55:55 crc kubenswrapper[4652]: I1205 
05:55:55.212944 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" event={"ID":"1e9468f7-9fcc-48bd-9745-af6990cb4091","Type":"ContainerDied","Data":"9f4cfdb3023764b1ab86df347383f8c717af44d80ad0440bceadf40aee9310e7"} Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.029628 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cmts"] Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.036365 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cmts"] Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.135400 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3516f65d-a205-4768-b345-cee580a16b09" path="/var/lib/kubelet/pods/3516f65d-a205-4768-b345-cee580a16b09/volumes" Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.547587 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.695658 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-ssh-key\") pod \"1e9468f7-9fcc-48bd-9745-af6990cb4091\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.695739 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nk6tp\" (UniqueName: \"kubernetes.io/projected/1e9468f7-9fcc-48bd-9745-af6990cb4091-kube-api-access-nk6tp\") pod \"1e9468f7-9fcc-48bd-9745-af6990cb4091\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.695957 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-inventory\") pod \"1e9468f7-9fcc-48bd-9745-af6990cb4091\" (UID: \"1e9468f7-9fcc-48bd-9745-af6990cb4091\") " Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.702658 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e9468f7-9fcc-48bd-9745-af6990cb4091-kube-api-access-nk6tp" (OuterVolumeSpecName: "kube-api-access-nk6tp") pod "1e9468f7-9fcc-48bd-9745-af6990cb4091" (UID: "1e9468f7-9fcc-48bd-9745-af6990cb4091"). InnerVolumeSpecName "kube-api-access-nk6tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.717365 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1e9468f7-9fcc-48bd-9745-af6990cb4091" (UID: "1e9468f7-9fcc-48bd-9745-af6990cb4091"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.721309 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-inventory" (OuterVolumeSpecName: "inventory") pod "1e9468f7-9fcc-48bd-9745-af6990cb4091" (UID: "1e9468f7-9fcc-48bd-9745-af6990cb4091"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.798099 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.798132 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1e9468f7-9fcc-48bd-9745-af6990cb4091-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:55:56 crc kubenswrapper[4652]: I1205 05:55:56.798142 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nk6tp\" (UniqueName: \"kubernetes.io/projected/1e9468f7-9fcc-48bd-9745-af6990cb4091-kube-api-access-nk6tp\") on node \"crc\" DevicePath \"\"" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.232492 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" event={"ID":"1e9468f7-9fcc-48bd-9745-af6990cb4091","Type":"ContainerDied","Data":"8f5003b0ebc421db38c07509b57772c8b961c17fe7f1c37bac9290da15f3dac3"} Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.232539 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f5003b0ebc421db38c07509b57772c8b961c17fe7f1c37bac9290da15f3dac3" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.232547 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-st6kl" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.284713 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r"] Dec 05 05:55:57 crc kubenswrapper[4652]: E1205 05:55:57.285134 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e9468f7-9fcc-48bd-9745-af6990cb4091" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.285154 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e9468f7-9fcc-48bd-9745-af6990cb4091" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.285345 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e9468f7-9fcc-48bd-9745-af6990cb4091" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.285946 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.287321 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.287631 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.289207 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.291986 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.292449 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r"] Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.409368 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lrn7\" (UniqueName: \"kubernetes.io/projected/49a80212-3f70-4629-b85e-a7a677a9e72e-kube-api-access-7lrn7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.409595 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.409650 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.510698 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lrn7\" (UniqueName: \"kubernetes.io/projected/49a80212-3f70-4629-b85e-a7a677a9e72e-kube-api-access-7lrn7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.510861 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.510904 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: 
\"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.513777 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.514399 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.525629 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lrn7\" (UniqueName: \"kubernetes.io/projected/49a80212-3f70-4629-b85e-a7a677a9e72e-kube-api-access-7lrn7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-8282r\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:57 crc kubenswrapper[4652]: I1205 05:55:57.607578 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:55:58 crc kubenswrapper[4652]: I1205 05:55:58.029172 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r"] Dec 05 05:55:58 crc kubenswrapper[4652]: I1205 05:55:58.240344 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" event={"ID":"49a80212-3f70-4629-b85e-a7a677a9e72e","Type":"ContainerStarted","Data":"d0effabfdc92182d90dabe1eaeb4747e8395cd4861879c72dbb3774b0e63bd02"} Dec 05 05:55:59 crc kubenswrapper[4652]: I1205 05:55:59.251827 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" event={"ID":"49a80212-3f70-4629-b85e-a7a677a9e72e","Type":"ContainerStarted","Data":"ee146ada3f52957df669903e368684705fb25c873aed3bc56f81b2bff0e5c4ea"} Dec 05 05:55:59 crc kubenswrapper[4652]: I1205 05:55:59.268144 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" podStartSLOduration=1.725297869 podStartE2EDuration="2.268131585s" podCreationTimestamp="2025-12-05 05:55:57 +0000 UTC" firstStartedPulling="2025-12-05 05:55:58.029612311 +0000 UTC m=+1760.266342578" lastFinishedPulling="2025-12-05 05:55:58.572446037 +0000 UTC m=+1760.809176294" observedRunningTime="2025-12-05 05:55:59.261803998 +0000 UTC m=+1761.498534264" watchObservedRunningTime="2025-12-05 05:55:59.268131585 +0000 UTC m=+1761.504861852" Dec 05 05:56:04 crc kubenswrapper[4652]: I1205 05:56:04.125449 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:56:04 crc kubenswrapper[4652]: E1205 05:56:04.126073 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 05:56:16 crc kubenswrapper[4652]: I1205 05:56:16.036598 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8rdbd"] Dec 05 05:56:16 crc kubenswrapper[4652]: I1205 05:56:16.051510 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-8rdbd"] Dec 05 05:56:16 crc kubenswrapper[4652]: I1205 05:56:16.134506 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d81472ce-02d8-406f-952a-a6196c2770f4" path="/var/lib/kubelet/pods/d81472ce-02d8-406f-952a-a6196c2770f4/volumes" Dec 05 05:56:17 crc kubenswrapper[4652]: I1205 05:56:17.026970 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-8dh2n"] Dec 05 05:56:17 crc kubenswrapper[4652]: I1205 05:56:17.035148 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-8dh2n"] Dec 05 05:56:18 crc kubenswrapper[4652]: I1205 05:56:18.131137 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:56:18 crc kubenswrapper[4652]: I1205 05:56:18.135987 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f49d0a16-f608-4d69-af94-2e84fc4dee10" path="/var/lib/kubelet/pods/f49d0a16-f608-4d69-af94-2e84fc4dee10/volumes" Dec 05 05:56:18 crc kubenswrapper[4652]: I1205 05:56:18.392993 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"e91379ac0dd46adf59e35fc8b2698ec2cbbe53e58b4afec1ae42fc455feb90c9"} Dec 05 05:56:26 crc kubenswrapper[4652]: I1205 05:56:26.455725 4652 generic.go:334] "Generic (PLEG): container finished" podID="49a80212-3f70-4629-b85e-a7a677a9e72e" containerID="ee146ada3f52957df669903e368684705fb25c873aed3bc56f81b2bff0e5c4ea" exitCode=0 Dec 05 05:56:26 crc kubenswrapper[4652]: I1205 05:56:26.455796 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" event={"ID":"49a80212-3f70-4629-b85e-a7a677a9e72e","Type":"ContainerDied","Data":"ee146ada3f52957df669903e368684705fb25c873aed3bc56f81b2bff0e5c4ea"} Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.776910 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.787729 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-ssh-key\") pod \"49a80212-3f70-4629-b85e-a7a677a9e72e\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.787767 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lrn7\" (UniqueName: \"kubernetes.io/projected/49a80212-3f70-4629-b85e-a7a677a9e72e-kube-api-access-7lrn7\") pod \"49a80212-3f70-4629-b85e-a7a677a9e72e\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.792512 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49a80212-3f70-4629-b85e-a7a677a9e72e-kube-api-access-7lrn7" (OuterVolumeSpecName: "kube-api-access-7lrn7") pod "49a80212-3f70-4629-b85e-a7a677a9e72e" (UID: "49a80212-3f70-4629-b85e-a7a677a9e72e"). InnerVolumeSpecName "kube-api-access-7lrn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.814726 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "49a80212-3f70-4629-b85e-a7a677a9e72e" (UID: "49a80212-3f70-4629-b85e-a7a677a9e72e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.889789 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-inventory\") pod \"49a80212-3f70-4629-b85e-a7a677a9e72e\" (UID: \"49a80212-3f70-4629-b85e-a7a677a9e72e\") " Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.890654 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.890673 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lrn7\" (UniqueName: \"kubernetes.io/projected/49a80212-3f70-4629-b85e-a7a677a9e72e-kube-api-access-7lrn7\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.909244 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-inventory" (OuterVolumeSpecName: "inventory") pod "49a80212-3f70-4629-b85e-a7a677a9e72e" (UID: "49a80212-3f70-4629-b85e-a7a677a9e72e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:27 crc kubenswrapper[4652]: I1205 05:56:27.991955 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49a80212-3f70-4629-b85e-a7a677a9e72e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.471502 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" event={"ID":"49a80212-3f70-4629-b85e-a7a677a9e72e","Type":"ContainerDied","Data":"d0effabfdc92182d90dabe1eaeb4747e8395cd4861879c72dbb3774b0e63bd02"} Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.471549 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0effabfdc92182d90dabe1eaeb4747e8395cd4861879c72dbb3774b0e63bd02" Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.471576 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-8282r" Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.528390 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"] Dec 05 05:56:28 crc kubenswrapper[4652]: E1205 05:56:28.528790 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49a80212-3f70-4629-b85e-a7a677a9e72e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.528808 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="49a80212-3f70-4629-b85e-a7a677a9e72e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.529010 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="49a80212-3f70-4629-b85e-a7a677a9e72e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.529624 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.531831 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.532146 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.532284 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.534384 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.535888 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"]
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.602334 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.602455 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97ssr\" (UniqueName: \"kubernetes.io/projected/ea47dfbf-ed1f-4197-9c06-68290a722e2d-kube-api-access-97ssr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.602540 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.704084 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.704178 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97ssr\" (UniqueName: \"kubernetes.io/projected/ea47dfbf-ed1f-4197-9c06-68290a722e2d-kube-api-access-97ssr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.704237 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.707570 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.707824 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.718351 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97ssr\" (UniqueName: \"kubernetes.io/projected/ea47dfbf-ed1f-4197-9c06-68290a722e2d-kube-api-access-97ssr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-vcvww\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:28 crc kubenswrapper[4652]: I1205 05:56:28.844362 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:56:29 crc kubenswrapper[4652]: I1205 05:56:29.325699 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"]
Dec 05 05:56:29 crc kubenswrapper[4652]: I1205 05:56:29.478564 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww" event={"ID":"ea47dfbf-ed1f-4197-9c06-68290a722e2d","Type":"ContainerStarted","Data":"1bfef7ce751ee748b3a8e260f55e41696836f342d962d1c603a28e98f4242ce8"}
Dec 05 05:56:30 crc kubenswrapper[4652]: I1205 05:56:30.486699 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww" event={"ID":"ea47dfbf-ed1f-4197-9c06-68290a722e2d","Type":"ContainerStarted","Data":"1face9adc5222ed276ad2f61aaa4219bbbc8c4f5e3cc2090d98147463e34a90a"}
Dec 05 05:56:40 crc kubenswrapper[4652]: I1205 05:56:40.946952 4652 scope.go:117] "RemoveContainer" containerID="8f18516a28495baf1bacb0f34032551481833faf5ab6afce86722d2ada2c23dd"
Dec 05 05:56:40 crc kubenswrapper[4652]: I1205 05:56:40.976718 4652 scope.go:117] "RemoveContainer" containerID="de71d8fe70be007e4bd9eb4152ff6dd4bcc12901d836cdd54c25356484e00f05"
Dec 05 05:56:41 crc kubenswrapper[4652]: I1205 05:56:41.026933 4652 scope.go:117] "RemoveContainer" containerID="3fe241397942c9e6cb2ef45d388a0401feafaafa09e1f750fe9c4108e5358d32"
Dec 05 05:57:04 crc kubenswrapper[4652]: I1205 05:57:04.026169 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww" podStartSLOduration=35.540009809 podStartE2EDuration="36.026150965s" podCreationTimestamp="2025-12-05 05:56:28 +0000 UTC" firstStartedPulling="2025-12-05 05:56:29.328141626 +0000 UTC m=+1791.564871892" lastFinishedPulling="2025-12-05 05:56:29.814282781 +0000 UTC m=+1792.051013048" observedRunningTime="2025-12-05 05:56:30.506448831 +0000 UTC m=+1792.743179098" watchObservedRunningTime="2025-12-05 05:57:04.026150965 +0000 UTC m=+1826.262881232"
Dec 05 05:57:04 crc kubenswrapper[4652]: I1205 05:57:04.031797 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-zn44j"]
Dec 05 05:57:04 crc kubenswrapper[4652]: I1205 05:57:04.038348 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-zn44j"]
Dec 05 05:57:04 crc kubenswrapper[4652]: I1205 05:57:04.134121 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b873dbf3-75d1-4cdf-b213-f17952cd0bc8" path="/var/lib/kubelet/pods/b873dbf3-75d1-4cdf-b213-f17952cd0bc8/volumes"
Dec 05 05:57:04 crc kubenswrapper[4652]: I1205 05:57:04.736930 4652 generic.go:334] "Generic (PLEG): container finished" podID="ea47dfbf-ed1f-4197-9c06-68290a722e2d" containerID="1face9adc5222ed276ad2f61aaa4219bbbc8c4f5e3cc2090d98147463e34a90a" exitCode=0
Dec 05 05:57:04 crc kubenswrapper[4652]: I1205 05:57:04.736973 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww" event={"ID":"ea47dfbf-ed1f-4197-9c06-68290a722e2d","Type":"ContainerDied","Data":"1face9adc5222ed276ad2f61aaa4219bbbc8c4f5e3cc2090d98147463e34a90a"}
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.060308 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.085012 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97ssr\" (UniqueName: \"kubernetes.io/projected/ea47dfbf-ed1f-4197-9c06-68290a722e2d-kube-api-access-97ssr\") pod \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") "
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.085167 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-inventory\") pod \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") "
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.085271 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-ssh-key\") pod \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\" (UID: \"ea47dfbf-ed1f-4197-9c06-68290a722e2d\") "
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.090511 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea47dfbf-ed1f-4197-9c06-68290a722e2d-kube-api-access-97ssr" (OuterVolumeSpecName: "kube-api-access-97ssr") pod "ea47dfbf-ed1f-4197-9c06-68290a722e2d" (UID: "ea47dfbf-ed1f-4197-9c06-68290a722e2d"). InnerVolumeSpecName "kube-api-access-97ssr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.108734 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-inventory" (OuterVolumeSpecName: "inventory") pod "ea47dfbf-ed1f-4197-9c06-68290a722e2d" (UID: "ea47dfbf-ed1f-4197-9c06-68290a722e2d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.110106 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ea47dfbf-ed1f-4197-9c06-68290a722e2d" (UID: "ea47dfbf-ed1f-4197-9c06-68290a722e2d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.187733 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.187760 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97ssr\" (UniqueName: \"kubernetes.io/projected/ea47dfbf-ed1f-4197-9c06-68290a722e2d-kube-api-access-97ssr\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.187771 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea47dfbf-ed1f-4197-9c06-68290a722e2d-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.750531 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww" event={"ID":"ea47dfbf-ed1f-4197-9c06-68290a722e2d","Type":"ContainerDied","Data":"1bfef7ce751ee748b3a8e260f55e41696836f342d962d1c603a28e98f4242ce8"}
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.750593 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bfef7ce751ee748b3a8e260f55e41696836f342d962d1c603a28e98f4242ce8"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.750630 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-vcvww"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.817507 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xrfzl"]
Dec 05 05:57:06 crc kubenswrapper[4652]: E1205 05:57:06.817936 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea47dfbf-ed1f-4197-9c06-68290a722e2d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.817957 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea47dfbf-ed1f-4197-9c06-68290a722e2d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.818120 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea47dfbf-ed1f-4197-9c06-68290a722e2d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.818751 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.820849 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.821717 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.827869 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.828066 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.845287 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xrfzl"]
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.899361 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwdhs\" (UniqueName: \"kubernetes.io/projected/5878bd6d-36a3-44f1-9238-3c1160202f82-kube-api-access-rwdhs\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.899622 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:06 crc kubenswrapper[4652]: I1205 05:57:06.899831 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.002032 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.002219 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwdhs\" (UniqueName: \"kubernetes.io/projected/5878bd6d-36a3-44f1-9238-3c1160202f82-kube-api-access-rwdhs\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.002312 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.006281 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.006715 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.016155 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwdhs\" (UniqueName: \"kubernetes.io/projected/5878bd6d-36a3-44f1-9238-3c1160202f82-kube-api-access-rwdhs\") pod \"ssh-known-hosts-edpm-deployment-xrfzl\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") " pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.142889 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.577148 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xrfzl"]
Dec 05 05:57:07 crc kubenswrapper[4652]: I1205 05:57:07.758568 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl" event={"ID":"5878bd6d-36a3-44f1-9238-3c1160202f82","Type":"ContainerStarted","Data":"e4602e97ab96d1eaf03ecc032fd4801ade73f46a0b20888da592f6b1ea9d37e9"}
Dec 05 05:57:08 crc kubenswrapper[4652]: I1205 05:57:08.766800 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl" event={"ID":"5878bd6d-36a3-44f1-9238-3c1160202f82","Type":"ContainerStarted","Data":"d018bef3426a2244e56cf71c1caa0f1750f88b4d1701b7ab77dc553912dfefd5"}
Dec 05 05:57:08 crc kubenswrapper[4652]: I1205 05:57:08.784082 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl" podStartSLOduration=2.191400096 podStartE2EDuration="2.784065934s" podCreationTimestamp="2025-12-05 05:57:06 +0000 UTC" firstStartedPulling="2025-12-05 05:57:07.583543816 +0000 UTC m=+1829.820274083" lastFinishedPulling="2025-12-05 05:57:08.176209654 +0000 UTC m=+1830.412939921" observedRunningTime="2025-12-05 05:57:08.777799932 +0000 UTC m=+1831.014530199" watchObservedRunningTime="2025-12-05 05:57:08.784065934 +0000 UTC m=+1831.020796201"
Dec 05 05:57:13 crc kubenswrapper[4652]: I1205 05:57:13.802304 4652 generic.go:334] "Generic (PLEG): container finished" podID="5878bd6d-36a3-44f1-9238-3c1160202f82" containerID="d018bef3426a2244e56cf71c1caa0f1750f88b4d1701b7ab77dc553912dfefd5" exitCode=0
Dec 05 05:57:13 crc kubenswrapper[4652]: I1205 05:57:13.802384 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl" event={"ID":"5878bd6d-36a3-44f1-9238-3c1160202f82","Type":"ContainerDied","Data":"d018bef3426a2244e56cf71c1caa0f1750f88b4d1701b7ab77dc553912dfefd5"}
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.138659 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.264028 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwdhs\" (UniqueName: \"kubernetes.io/projected/5878bd6d-36a3-44f1-9238-3c1160202f82-kube-api-access-rwdhs\") pod \"5878bd6d-36a3-44f1-9238-3c1160202f82\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") "
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.264066 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-ssh-key-openstack-edpm-ipam\") pod \"5878bd6d-36a3-44f1-9238-3c1160202f82\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") "
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.264184 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-inventory-0\") pod \"5878bd6d-36a3-44f1-9238-3c1160202f82\" (UID: \"5878bd6d-36a3-44f1-9238-3c1160202f82\") "
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.269607 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5878bd6d-36a3-44f1-9238-3c1160202f82-kube-api-access-rwdhs" (OuterVolumeSpecName: "kube-api-access-rwdhs") pod "5878bd6d-36a3-44f1-9238-3c1160202f82" (UID: "5878bd6d-36a3-44f1-9238-3c1160202f82"). InnerVolumeSpecName "kube-api-access-rwdhs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.287861 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "5878bd6d-36a3-44f1-9238-3c1160202f82" (UID: "5878bd6d-36a3-44f1-9238-3c1160202f82"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.287899 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "5878bd6d-36a3-44f1-9238-3c1160202f82" (UID: "5878bd6d-36a3-44f1-9238-3c1160202f82"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.366415 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwdhs\" (UniqueName: \"kubernetes.io/projected/5878bd6d-36a3-44f1-9238-3c1160202f82-kube-api-access-rwdhs\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.366444 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.366454 4652 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5878bd6d-36a3-44f1-9238-3c1160202f82-inventory-0\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.819259 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl" event={"ID":"5878bd6d-36a3-44f1-9238-3c1160202f82","Type":"ContainerDied","Data":"e4602e97ab96d1eaf03ecc032fd4801ade73f46a0b20888da592f6b1ea9d37e9"}
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.819547 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4602e97ab96d1eaf03ecc032fd4801ade73f46a0b20888da592f6b1ea9d37e9"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.819310 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xrfzl"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.880395 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"]
Dec 05 05:57:15 crc kubenswrapper[4652]: E1205 05:57:15.881295 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5878bd6d-36a3-44f1-9238-3c1160202f82" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.881409 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="5878bd6d-36a3-44f1-9238-3c1160202f82" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.881950 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="5878bd6d-36a3-44f1-9238-3c1160202f82" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.883152 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.887339 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.887439 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.888602 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.888759 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.892406 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"]
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.982089 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv6ch\" (UniqueName: \"kubernetes.io/projected/c96e3166-ea9f-4421-b31f-dec147d6b7c9-kube-api-access-pv6ch\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.982339 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:15 crc kubenswrapper[4652]: I1205 05:57:15.982614 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.084283 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.084453 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv6ch\" (UniqueName: \"kubernetes.io/projected/c96e3166-ea9f-4421-b31f-dec147d6b7c9-kube-api-access-pv6ch\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.084492 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.088032 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.089249 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.097969 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv6ch\" (UniqueName: \"kubernetes.io/projected/c96e3166-ea9f-4421-b31f-dec147d6b7c9-kube-api-access-pv6ch\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7zwcg\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.201421 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.638358 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"]
Dec 05 05:57:16 crc kubenswrapper[4652]: I1205 05:57:16.827692 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg" event={"ID":"c96e3166-ea9f-4421-b31f-dec147d6b7c9","Type":"ContainerStarted","Data":"53809e954621f4f179991fc17a4572027e8c46c21f01150d821d80c49909739b"}
Dec 05 05:57:17 crc kubenswrapper[4652]: I1205 05:57:17.835148 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg" event={"ID":"c96e3166-ea9f-4421-b31f-dec147d6b7c9","Type":"ContainerStarted","Data":"bacb6f2229eaba51167f649addbcda49f98e0342bdc5027d1ec09e425a36dac5"}
Dec 05 05:57:17 crc kubenswrapper[4652]: I1205 05:57:17.847413 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg" podStartSLOduration=2.313990927 podStartE2EDuration="2.847398628s" podCreationTimestamp="2025-12-05 05:57:15 +0000 UTC" firstStartedPulling="2025-12-05 05:57:16.639502414 +0000 UTC m=+1838.876232682" lastFinishedPulling="2025-12-05 05:57:17.172910116 +0000 UTC m=+1839.409640383" observedRunningTime="2025-12-05 05:57:17.845055121 +0000 UTC m=+1840.081785388" watchObservedRunningTime="2025-12-05 05:57:17.847398628 +0000 UTC m=+1840.084128894"
Dec 05 05:57:23 crc kubenswrapper[4652]: I1205 05:57:23.894939 4652 generic.go:334] "Generic (PLEG): container finished" podID="c96e3166-ea9f-4421-b31f-dec147d6b7c9" containerID="bacb6f2229eaba51167f649addbcda49f98e0342bdc5027d1ec09e425a36dac5" exitCode=0
Dec 05 05:57:23 crc kubenswrapper[4652]: I1205 05:57:23.894980 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg" event={"ID":"c96e3166-ea9f-4421-b31f-dec147d6b7c9","Type":"ContainerDied","Data":"bacb6f2229eaba51167f649addbcda49f98e0342bdc5027d1ec09e425a36dac5"}
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.209821 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.248696 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-ssh-key\") pod \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") "
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.248732 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv6ch\" (UniqueName: \"kubernetes.io/projected/c96e3166-ea9f-4421-b31f-dec147d6b7c9-kube-api-access-pv6ch\") pod \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") "
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.248881 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-inventory\") pod \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\" (UID: \"c96e3166-ea9f-4421-b31f-dec147d6b7c9\") "
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.254700 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c96e3166-ea9f-4421-b31f-dec147d6b7c9-kube-api-access-pv6ch" (OuterVolumeSpecName: "kube-api-access-pv6ch") pod "c96e3166-ea9f-4421-b31f-dec147d6b7c9" (UID: "c96e3166-ea9f-4421-b31f-dec147d6b7c9"). InnerVolumeSpecName "kube-api-access-pv6ch". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.271876 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c96e3166-ea9f-4421-b31f-dec147d6b7c9" (UID: "c96e3166-ea9f-4421-b31f-dec147d6b7c9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.271951 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-inventory" (OuterVolumeSpecName: "inventory") pod "c96e3166-ea9f-4421-b31f-dec147d6b7c9" (UID: "c96e3166-ea9f-4421-b31f-dec147d6b7c9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.351254 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.351280 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96e3166-ea9f-4421-b31f-dec147d6b7c9-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.351308 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv6ch\" (UniqueName: \"kubernetes.io/projected/c96e3166-ea9f-4421-b31f-dec147d6b7c9-kube-api-access-pv6ch\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.910351 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg" event={"ID":"c96e3166-ea9f-4421-b31f-dec147d6b7c9","Type":"ContainerDied","Data":"53809e954621f4f179991fc17a4572027e8c46c21f01150d821d80c49909739b"}
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.910550 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53809e954621f4f179991fc17a4572027e8c46c21f01150d821d80c49909739b"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.910390 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7zwcg"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.963064 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"]
Dec 05 05:57:25 crc kubenswrapper[4652]: E1205 05:57:25.963454 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c96e3166-ea9f-4421-b31f-dec147d6b7c9" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.963472 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c96e3166-ea9f-4421-b31f-dec147d6b7c9" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.963698 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c96e3166-ea9f-4421-b31f-dec147d6b7c9" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.964393 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.965875 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.966042 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.966333 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.967326 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 05:57:25 crc kubenswrapper[4652]: I1205 05:57:25.969881 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"]
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.062453 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.062589 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.062659 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfbp7\" (UniqueName: \"kubernetes.io/projected/fb384ad5-89e0-44e5-b4db-09d13e563453-kube-api-access-xfbp7\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.165150 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.165255 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.165318 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfbp7\" (UniqueName: \"kubernetes.io/projected/fb384ad5-89e0-44e5-b4db-09d13e563453-kube-api-access-xfbp7\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.170329 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.170330 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.181810 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfbp7\" (UniqueName: \"kubernetes.io/projected/fb384ad5-89e0-44e5-b4db-09d13e563453-kube-api-access-xfbp7\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.288112 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.782274 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"]
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.787339 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 05:57:26 crc kubenswrapper[4652]: I1205 05:57:26.918421 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5" event={"ID":"fb384ad5-89e0-44e5-b4db-09d13e563453","Type":"ContainerStarted","Data":"9cd5160bf93ec623407729628b7557a3addb27b1cd3592d3d32c5b6c3dd25e17"}
Dec 05 05:57:27 crc kubenswrapper[4652]: I1205 05:57:27.926936 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5" event={"ID":"fb384ad5-89e0-44e5-b4db-09d13e563453","Type":"ContainerStarted","Data":"f9ea13944bee61e813c720da0937d0f3e8bd8c8534ec55bc0a1acf244d6b2e4b"}
Dec 05 05:57:27 crc kubenswrapper[4652]: I1205 05:57:27.941736 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5" podStartSLOduration=2.448112626 podStartE2EDuration="2.941722886s" podCreationTimestamp="2025-12-05 05:57:25 +0000 UTC" firstStartedPulling="2025-12-05 05:57:26.787125512 +0000 UTC m=+1849.023855768" lastFinishedPulling="2025-12-05 05:57:27.280735761 +0000 UTC m=+1849.517466028" observedRunningTime="2025-12-05 05:57:27.940577823 +0000 UTC m=+1850.177308090" watchObservedRunningTime="2025-12-05 05:57:27.941722886 +0000 UTC m=+1850.178453152"
Dec 05 05:57:34 crc kubenswrapper[4652]: I1205 05:57:34.980130 4652 generic.go:334] "Generic (PLEG): container finished" podID="fb384ad5-89e0-44e5-b4db-09d13e563453" containerID="f9ea13944bee61e813c720da0937d0f3e8bd8c8534ec55bc0a1acf244d6b2e4b" exitCode=0
Dec 05 05:57:34 crc kubenswrapper[4652]: I1205 05:57:34.980211 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5" event={"ID":"fb384ad5-89e0-44e5-b4db-09d13e563453","Type":"ContainerDied","Data":"f9ea13944bee61e813c720da0937d0f3e8bd8c8534ec55bc0a1acf244d6b2e4b"}
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.363644 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.435334 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfbp7\" (UniqueName: \"kubernetes.io/projected/fb384ad5-89e0-44e5-b4db-09d13e563453-kube-api-access-xfbp7\") pod \"fb384ad5-89e0-44e5-b4db-09d13e563453\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") "
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.435418 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-inventory\") pod \"fb384ad5-89e0-44e5-b4db-09d13e563453\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") "
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.435485 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-ssh-key\") pod \"fb384ad5-89e0-44e5-b4db-09d13e563453\" (UID: \"fb384ad5-89e0-44e5-b4db-09d13e563453\") "
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.440971 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb384ad5-89e0-44e5-b4db-09d13e563453-kube-api-access-xfbp7" (OuterVolumeSpecName: "kube-api-access-xfbp7") pod "fb384ad5-89e0-44e5-b4db-09d13e563453" (UID: "fb384ad5-89e0-44e5-b4db-09d13e563453"). InnerVolumeSpecName "kube-api-access-xfbp7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.458522 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-inventory" (OuterVolumeSpecName: "inventory") pod "fb384ad5-89e0-44e5-b4db-09d13e563453" (UID: "fb384ad5-89e0-44e5-b4db-09d13e563453"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.460636 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fb384ad5-89e0-44e5-b4db-09d13e563453" (UID: "fb384ad5-89e0-44e5-b4db-09d13e563453"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.539477 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfbp7\" (UniqueName: \"kubernetes.io/projected/fb384ad5-89e0-44e5-b4db-09d13e563453-kube-api-access-xfbp7\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.539520 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:36 crc kubenswrapper[4652]: I1205 05:57:36.539547 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb384ad5-89e0-44e5-b4db-09d13e563453-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.000538 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5" event={"ID":"fb384ad5-89e0-44e5-b4db-09d13e563453","Type":"ContainerDied","Data":"9cd5160bf93ec623407729628b7557a3addb27b1cd3592d3d32c5b6c3dd25e17"}
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.000604 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cd5160bf93ec623407729628b7557a3addb27b1cd3592d3d32c5b6c3dd25e17"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.000628 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.147992 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"]
Dec 05 05:57:37 crc kubenswrapper[4652]: E1205 05:57:37.148716 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb384ad5-89e0-44e5-b4db-09d13e563453" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.148983 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb384ad5-89e0-44e5-b4db-09d13e563453" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.149548 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb384ad5-89e0-44e5-b4db-09d13e563453" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.150571 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.155157 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.158399 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.158616 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.158889 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.160251 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.160417 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.160415 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.160632 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.168241 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"]
Dec 05 05:57:37 crc kubenswrapper[4652]: E1205 05:57:37.191261 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb384ad5_89e0_44e5_b4db_09d13e563453.slice/crio-9cd5160bf93ec623407729628b7557a3addb27b1cd3592d3d32c5b6c3dd25e17\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb384ad5_89e0_44e5_b4db_09d13e563453.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256327 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256509 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256542 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256578 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256673 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbt2p\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-kube-api-access-qbt2p\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256703 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256725 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256742 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256759 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256891 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256934 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.256997 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.257063 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.257123 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360110 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbt2p\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-kube-api-access-qbt2p\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360204 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360251 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360282 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360311 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360346 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360372 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360398 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360439 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360479 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360518 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360582 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360609 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.360643 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.365491 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.365893 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.365996 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.366141 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.366290 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.367433 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.367800 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.367856 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.368309 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.368483 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.369193 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"
Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.369709 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\"
(UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.370683 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.375832 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbt2p\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-kube-api-access-qbt2p\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.480355 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" Dec 05 05:57:37 crc kubenswrapper[4652]: I1205 05:57:37.920023 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4"] Dec 05 05:57:38 crc kubenswrapper[4652]: I1205 05:57:38.009830 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" event={"ID":"f53983a0-2e5c-43e3-87eb-e58ed8c7c082","Type":"ContainerStarted","Data":"42580bc09db456728059d71c16df4f9af56e06f9e418fdcb06f0ff13db0cbda0"} Dec 05 05:57:38 crc kubenswrapper[4652]: I1205 05:57:38.456262 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:57:39 crc kubenswrapper[4652]: I1205 05:57:39.021520 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" event={"ID":"f53983a0-2e5c-43e3-87eb-e58ed8c7c082","Type":"ContainerStarted","Data":"d60ca70b3244285f809c73856bdfe2a69aadcadc2477a5f97affe14295525437"} Dec 05 05:57:39 crc kubenswrapper[4652]: I1205 05:57:39.042724 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" podStartSLOduration=1.514028035 podStartE2EDuration="2.042706905s" podCreationTimestamp="2025-12-05 05:57:37 +0000 UTC" firstStartedPulling="2025-12-05 05:57:37.925299231 +0000 UTC m=+1860.162029498" lastFinishedPulling="2025-12-05 05:57:38.453978101 +0000 UTC m=+1860.690708368" observedRunningTime="2025-12-05 05:57:39.036306251 +0000 UTC m=+1861.273036518" watchObservedRunningTime="2025-12-05 05:57:39.042706905 +0000 UTC m=+1861.279437172" Dec 05 05:57:41 crc kubenswrapper[4652]: I1205 05:57:41.113002 4652 scope.go:117] "RemoveContainer" containerID="a8b4c104548f841f5a694907ef0d52bd2d273200f23763ea80c48d4192abe0b1" Dec 05 05:58:05 crc kubenswrapper[4652]: I1205 05:58:05.227174 4652 generic.go:334] "Generic (PLEG): container finished" podID="f53983a0-2e5c-43e3-87eb-e58ed8c7c082" containerID="d60ca70b3244285f809c73856bdfe2a69aadcadc2477a5f97affe14295525437" exitCode=0 
Dec 05 05:58:05 crc kubenswrapper[4652]: I1205 05:58:05.227258 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" event={"ID":"f53983a0-2e5c-43e3-87eb-e58ed8c7c082","Type":"ContainerDied","Data":"d60ca70b3244285f809c73856bdfe2a69aadcadc2477a5f97affe14295525437"} Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.641280 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.804206 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-libvirt-combined-ca-bundle\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.804316 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ovn-combined-ca-bundle\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.804336 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ssh-key\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.804453 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-telemetry-combined-ca-bundle\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.804474 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-inventory\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.804496 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.804543 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.805009 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-bootstrap-combined-ca-bundle\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: 
\"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.805087 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.805120 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbt2p\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-kube-api-access-qbt2p\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.805142 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-ovn-default-certs-0\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.805165 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-neutron-metadata-combined-ca-bundle\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.805184 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-repo-setup-combined-ca-bundle\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.805219 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-nova-combined-ca-bundle\") pod \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\" (UID: \"f53983a0-2e5c-43e3-87eb-e58ed8c7c082\") " Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.813138 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.813988 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.813996 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.814353 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.814440 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.814764 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.814793 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.814812 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.814892 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.815211 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.817470 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.817517 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-kube-api-access-qbt2p" (OuterVolumeSpecName: "kube-api-access-qbt2p") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "kube-api-access-qbt2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.832722 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.832933 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-inventory" (OuterVolumeSpecName: "inventory") pod "f53983a0-2e5c-43e3-87eb-e58ed8c7c082" (UID: "f53983a0-2e5c-43e3-87eb-e58ed8c7c082"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907214 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907243 4652 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907255 4652 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907264 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907273 4652 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907283 4652 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907293 4652 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907301 4652 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907311 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbt2p\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-kube-api-access-qbt2p\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907319 4652 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907327 4652 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907336 4652 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907346 4652 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:06 crc kubenswrapper[4652]: I1205 05:58:06.907355 4652 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53983a0-2e5c-43e3-87eb-e58ed8c7c082-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.248468 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" event={"ID":"f53983a0-2e5c-43e3-87eb-e58ed8c7c082","Type":"ContainerDied","Data":"42580bc09db456728059d71c16df4f9af56e06f9e418fdcb06f0ff13db0cbda0"} Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.248680 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42580bc09db456728059d71c16df4f9af56e06f9e418fdcb06f0ff13db0cbda0" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.248505 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.340177 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56"] Dec 05 05:58:07 crc kubenswrapper[4652]: E1205 05:58:07.340598 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f53983a0-2e5c-43e3-87eb-e58ed8c7c082" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.340618 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f53983a0-2e5c-43e3-87eb-e58ed8c7c082" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.340824 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f53983a0-2e5c-43e3-87eb-e58ed8c7c082" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.341501 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.342995 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.343162 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.343198 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.343348 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.344985 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.351522 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56"] Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.415378 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zffjn\" (UniqueName: \"kubernetes.io/projected/499e9d22-ac03-4546-95bb-490b880a35fa-kube-api-access-zffjn\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.415578 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.415780 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.415928 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.415993 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/499e9d22-ac03-4546-95bb-490b880a35fa-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.517542 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zffjn\" 
(UniqueName: \"kubernetes.io/projected/499e9d22-ac03-4546-95bb-490b880a35fa-kube-api-access-zffjn\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.517660 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.517780 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.517855 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.517893 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/499e9d22-ac03-4546-95bb-490b880a35fa-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.518735 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/499e9d22-ac03-4546-95bb-490b880a35fa-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.522297 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.522658 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.522753 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.532900 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zffjn\" (UniqueName: \"kubernetes.io/projected/499e9d22-ac03-4546-95bb-490b880a35fa-kube-api-access-zffjn\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5qc56\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:07 crc kubenswrapper[4652]: I1205 05:58:07.658096 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:08 crc kubenswrapper[4652]: I1205 05:58:08.089853 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56"] Dec 05 05:58:08 crc kubenswrapper[4652]: W1205 05:58:08.094119 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod499e9d22_ac03_4546_95bb_490b880a35fa.slice/crio-b3076942eeda67e0c4e48b382b8382242474f1870dca8c3667c6246af549a485 WatchSource:0}: Error finding container b3076942eeda67e0c4e48b382b8382242474f1870dca8c3667c6246af549a485: Status 404 returned error can't find the container with id b3076942eeda67e0c4e48b382b8382242474f1870dca8c3667c6246af549a485 Dec 05 05:58:08 crc kubenswrapper[4652]: I1205 05:58:08.256223 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" event={"ID":"499e9d22-ac03-4546-95bb-490b880a35fa","Type":"ContainerStarted","Data":"b3076942eeda67e0c4e48b382b8382242474f1870dca8c3667c6246af549a485"} Dec 05 05:58:09 crc kubenswrapper[4652]: I1205 05:58:09.264508 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" event={"ID":"499e9d22-ac03-4546-95bb-490b880a35fa","Type":"ContainerStarted","Data":"a9d1915f9b457b1653e3514f572c680f33dc09a463792d17de17f64beaf71883"} Dec 05 05:58:09 crc kubenswrapper[4652]: I1205 05:58:09.297483 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" podStartSLOduration=1.728287462 podStartE2EDuration="2.297464789s" podCreationTimestamp="2025-12-05 05:58:07 +0000 UTC" firstStartedPulling="2025-12-05 05:58:08.096045508 +0000 UTC m=+1890.332775776" lastFinishedPulling="2025-12-05 05:58:08.665222836 +0000 UTC m=+1890.901953103" observedRunningTime="2025-12-05 05:58:09.286177344 +0000 UTC m=+1891.522907611" watchObservedRunningTime="2025-12-05 05:58:09.297464789 +0000 UTC m=+1891.534195056" Dec 05 05:58:34 crc kubenswrapper[4652]: I1205 05:58:34.150580 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:58:34 crc kubenswrapper[4652]: I1205 05:58:34.150967 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:58:51 crc kubenswrapper[4652]: I1205 05:58:51.580076 4652 generic.go:334] 
"Generic (PLEG): container finished" podID="499e9d22-ac03-4546-95bb-490b880a35fa" containerID="a9d1915f9b457b1653e3514f572c680f33dc09a463792d17de17f64beaf71883" exitCode=0 Dec 05 05:58:51 crc kubenswrapper[4652]: I1205 05:58:51.580152 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" event={"ID":"499e9d22-ac03-4546-95bb-490b880a35fa","Type":"ContainerDied","Data":"a9d1915f9b457b1653e3514f572c680f33dc09a463792d17de17f64beaf71883"} Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.890665 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.925697 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ssh-key\") pod \"499e9d22-ac03-4546-95bb-490b880a35fa\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.925881 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-inventory\") pod \"499e9d22-ac03-4546-95bb-490b880a35fa\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.925915 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/499e9d22-ac03-4546-95bb-490b880a35fa-ovncontroller-config-0\") pod \"499e9d22-ac03-4546-95bb-490b880a35fa\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.925964 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zffjn\" (UniqueName: \"kubernetes.io/projected/499e9d22-ac03-4546-95bb-490b880a35fa-kube-api-access-zffjn\") pod \"499e9d22-ac03-4546-95bb-490b880a35fa\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.926069 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ovn-combined-ca-bundle\") pod \"499e9d22-ac03-4546-95bb-490b880a35fa\" (UID: \"499e9d22-ac03-4546-95bb-490b880a35fa\") " Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.931959 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "499e9d22-ac03-4546-95bb-490b880a35fa" (UID: "499e9d22-ac03-4546-95bb-490b880a35fa"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.931972 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/499e9d22-ac03-4546-95bb-490b880a35fa-kube-api-access-zffjn" (OuterVolumeSpecName: "kube-api-access-zffjn") pod "499e9d22-ac03-4546-95bb-490b880a35fa" (UID: "499e9d22-ac03-4546-95bb-490b880a35fa"). InnerVolumeSpecName "kube-api-access-zffjn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.948276 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499e9d22-ac03-4546-95bb-490b880a35fa-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "499e9d22-ac03-4546-95bb-490b880a35fa" (UID: "499e9d22-ac03-4546-95bb-490b880a35fa"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.950050 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-inventory" (OuterVolumeSpecName: "inventory") pod "499e9d22-ac03-4546-95bb-490b880a35fa" (UID: "499e9d22-ac03-4546-95bb-490b880a35fa"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:52 crc kubenswrapper[4652]: I1205 05:58:52.950349 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "499e9d22-ac03-4546-95bb-490b880a35fa" (UID: "499e9d22-ac03-4546-95bb-490b880a35fa"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.028717 4652 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.028966 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.028978 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499e9d22-ac03-4546-95bb-490b880a35fa-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.028989 4652 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/499e9d22-ac03-4546-95bb-490b880a35fa-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.028997 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zffjn\" (UniqueName: \"kubernetes.io/projected/499e9d22-ac03-4546-95bb-490b880a35fa-kube-api-access-zffjn\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.597422 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" event={"ID":"499e9d22-ac03-4546-95bb-490b880a35fa","Type":"ContainerDied","Data":"b3076942eeda67e0c4e48b382b8382242474f1870dca8c3667c6246af549a485"} Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.597468 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3076942eeda67e0c4e48b382b8382242474f1870dca8c3667c6246af549a485" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.597538 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5qc56" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.677720 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c"] Dec 05 05:58:53 crc kubenswrapper[4652]: E1205 05:58:53.678201 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="499e9d22-ac03-4546-95bb-490b880a35fa" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.678219 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="499e9d22-ac03-4546-95bb-490b880a35fa" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.678403 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="499e9d22-ac03-4546-95bb-490b880a35fa" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.679099 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.682675 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.683184 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.683742 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.685597 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.686116 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.686221 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.691569 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c"] Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.743740 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.743857 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv4pp\" (UniqueName: \"kubernetes.io/projected/8b54e322-d9fc-4a77-815b-461aef555fba-kube-api-access-jv4pp\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.743893 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.744072 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.744220 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.744257 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.845796 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.845936 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.845970 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.846019 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" 
(UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.846156 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv4pp\" (UniqueName: \"kubernetes.io/projected/8b54e322-d9fc-4a77-815b-461aef555fba-kube-api-access-jv4pp\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.846194 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.851097 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.851114 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.851366 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.852672 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.853178 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.860885 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv4pp\" (UniqueName: 
\"kubernetes.io/projected/8b54e322-d9fc-4a77-815b-461aef555fba-kube-api-access-jv4pp\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:53 crc kubenswrapper[4652]: I1205 05:58:53.999391 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:58:54 crc kubenswrapper[4652]: I1205 05:58:54.488086 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c"] Dec 05 05:58:54 crc kubenswrapper[4652]: I1205 05:58:54.609601 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" event={"ID":"8b54e322-d9fc-4a77-815b-461aef555fba","Type":"ContainerStarted","Data":"26e95674f785ebf0268f501b4356a2b7527a673b434ff40ee497f1de91579bb8"} Dec 05 05:58:55 crc kubenswrapper[4652]: I1205 05:58:55.619167 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" event={"ID":"8b54e322-d9fc-4a77-815b-461aef555fba","Type":"ContainerStarted","Data":"907d515de34e0665844bacab18f78ddf897565632d31999ea953f0dafa89ec8c"} Dec 05 05:58:55 crc kubenswrapper[4652]: I1205 05:58:55.634193 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" podStartSLOduration=2.154668612 podStartE2EDuration="2.634177471s" podCreationTimestamp="2025-12-05 05:58:53 +0000 UTC" firstStartedPulling="2025-12-05 05:58:54.492186978 +0000 UTC m=+1936.728917245" lastFinishedPulling="2025-12-05 05:58:54.971695837 +0000 UTC m=+1937.208426104" observedRunningTime="2025-12-05 05:58:55.633661421 +0000 UTC m=+1937.870391687" watchObservedRunningTime="2025-12-05 05:58:55.634177471 +0000 UTC m=+1937.870907737" Dec 05 05:59:04 crc kubenswrapper[4652]: I1205 05:59:04.150808 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:59:04 crc kubenswrapper[4652]: I1205 05:59:04.151419 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:59:28 crc kubenswrapper[4652]: I1205 05:59:28.870044 4652 generic.go:334] "Generic (PLEG): container finished" podID="8b54e322-d9fc-4a77-815b-461aef555fba" containerID="907d515de34e0665844bacab18f78ddf897565632d31999ea953f0dafa89ec8c" exitCode=0 Dec 05 05:59:28 crc kubenswrapper[4652]: I1205 05:59:28.870083 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" event={"ID":"8b54e322-d9fc-4a77-815b-461aef555fba","Type":"ContainerDied","Data":"907d515de34e0665844bacab18f78ddf897565632d31999ea953f0dafa89ec8c"} Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.194184 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.347165 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-nova-metadata-neutron-config-0\") pod \"8b54e322-d9fc-4a77-815b-461aef555fba\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.347362 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-inventory\") pod \"8b54e322-d9fc-4a77-815b-461aef555fba\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.347404 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-metadata-combined-ca-bundle\") pod \"8b54e322-d9fc-4a77-815b-461aef555fba\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.347529 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-ovn-metadata-agent-neutron-config-0\") pod \"8b54e322-d9fc-4a77-815b-461aef555fba\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.348419 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jv4pp\" (UniqueName: \"kubernetes.io/projected/8b54e322-d9fc-4a77-815b-461aef555fba-kube-api-access-jv4pp\") pod \"8b54e322-d9fc-4a77-815b-461aef555fba\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.348465 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-ssh-key\") pod \"8b54e322-d9fc-4a77-815b-461aef555fba\" (UID: \"8b54e322-d9fc-4a77-815b-461aef555fba\") " Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.352650 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "8b54e322-d9fc-4a77-815b-461aef555fba" (UID: "8b54e322-d9fc-4a77-815b-461aef555fba"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.352662 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b54e322-d9fc-4a77-815b-461aef555fba-kube-api-access-jv4pp" (OuterVolumeSpecName: "kube-api-access-jv4pp") pod "8b54e322-d9fc-4a77-815b-461aef555fba" (UID: "8b54e322-d9fc-4a77-815b-461aef555fba"). InnerVolumeSpecName "kube-api-access-jv4pp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.370085 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "8b54e322-d9fc-4a77-815b-461aef555fba" (UID: "8b54e322-d9fc-4a77-815b-461aef555fba"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.370922 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-inventory" (OuterVolumeSpecName: "inventory") pod "8b54e322-d9fc-4a77-815b-461aef555fba" (UID: "8b54e322-d9fc-4a77-815b-461aef555fba"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.371186 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "8b54e322-d9fc-4a77-815b-461aef555fba" (UID: "8b54e322-d9fc-4a77-815b-461aef555fba"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.371694 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8b54e322-d9fc-4a77-815b-461aef555fba" (UID: "8b54e322-d9fc-4a77-815b-461aef555fba"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.450319 4652 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.450345 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jv4pp\" (UniqueName: \"kubernetes.io/projected/8b54e322-d9fc-4a77-815b-461aef555fba-kube-api-access-jv4pp\") on node \"crc\" DevicePath \"\"" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.450355 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.450364 4652 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.450372 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.450381 4652 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b54e322-d9fc-4a77-815b-461aef555fba-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.894444 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" event={"ID":"8b54e322-d9fc-4a77-815b-461aef555fba","Type":"ContainerDied","Data":"26e95674f785ebf0268f501b4356a2b7527a673b434ff40ee497f1de91579bb8"} Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.894488 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26e95674f785ebf0268f501b4356a2b7527a673b434ff40ee497f1de91579bb8" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.894604 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.951299 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b"] Dec 05 05:59:30 crc kubenswrapper[4652]: E1205 05:59:30.951928 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b54e322-d9fc-4a77-815b-461aef555fba" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.951950 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b54e322-d9fc-4a77-815b-461aef555fba" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.952165 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b54e322-d9fc-4a77-815b-461aef555fba" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.952847 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.954253 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.954582 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.954736 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.955781 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.957513 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.957770 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdzxk\" (UniqueName: \"kubernetes.io/projected/03326baf-b566-4e1d-a8e6-07bb1c1535ad-kube-api-access-gdzxk\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.957855 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.957919 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.957945 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.958000 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:30 crc kubenswrapper[4652]: I1205 05:59:30.959358 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b"] Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.059496 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.059673 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdzxk\" (UniqueName: \"kubernetes.io/projected/03326baf-b566-4e1d-a8e6-07bb1c1535ad-kube-api-access-gdzxk\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.059815 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.059867 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.059890 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.062751 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.062814 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.062941 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.063291 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.071919 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdzxk\" (UniqueName: \"kubernetes.io/projected/03326baf-b566-4e1d-a8e6-07bb1c1535ad-kube-api-access-gdzxk\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-9q65b\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.265746 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.691987 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b"] Dec 05 05:59:31 crc kubenswrapper[4652]: I1205 05:59:31.901249 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" event={"ID":"03326baf-b566-4e1d-a8e6-07bb1c1535ad","Type":"ContainerStarted","Data":"37f3e66be18432c1ec5047abb61528b30d780d18895dfd5dd6cd162e3a40ff46"} Dec 05 05:59:32 crc kubenswrapper[4652]: I1205 05:59:32.910089 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" event={"ID":"03326baf-b566-4e1d-a8e6-07bb1c1535ad","Type":"ContainerStarted","Data":"cbf426f6d17582030b3bdb4698b9d3fb0b3350a23b9d980a63f5b0612939418c"} Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.150809 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.150859 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.150898 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.151665 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e91379ac0dd46adf59e35fc8b2698ec2cbbe53e58b4afec1ae42fc455feb90c9"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.151719 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://e91379ac0dd46adf59e35fc8b2698ec2cbbe53e58b4afec1ae42fc455feb90c9" gracePeriod=600 Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.926670 4652 generic.go:334] "Generic (PLEG): 
container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="e91379ac0dd46adf59e35fc8b2698ec2cbbe53e58b4afec1ae42fc455feb90c9" exitCode=0 Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.926748 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"e91379ac0dd46adf59e35fc8b2698ec2cbbe53e58b4afec1ae42fc455feb90c9"} Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.927033 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"} Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.927057 4652 scope.go:117] "RemoveContainer" containerID="b9dec49e12f8855e8433e1241c08e62da169aa00a3e4219144bc42b9663e9876" Dec 05 05:59:34 crc kubenswrapper[4652]: I1205 05:59:34.939650 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" podStartSLOduration=4.403255296 podStartE2EDuration="4.939637469s" podCreationTimestamp="2025-12-05 05:59:30 +0000 UTC" firstStartedPulling="2025-12-05 05:59:31.69734371 +0000 UTC m=+1973.934073977" lastFinishedPulling="2025-12-05 05:59:32.233725882 +0000 UTC m=+1974.470456150" observedRunningTime="2025-12-05 05:59:32.927177831 +0000 UTC m=+1975.163908098" watchObservedRunningTime="2025-12-05 05:59:34.939637469 +0000 UTC m=+1977.176367736" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.143075 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn"] Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.145257 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.146957 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.147504 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.154189 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn"] Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.198080 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-secret-volume\") pod \"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.198276 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-config-volume\") pod \"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.198372 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ccvb\" (UniqueName: \"kubernetes.io/projected/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-kube-api-access-2ccvb\") pod \"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.301153 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-secret-volume\") pod \"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.301234 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-config-volume\") pod \"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.301277 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ccvb\" (UniqueName: \"kubernetes.io/projected/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-kube-api-access-2ccvb\") pod \"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.302477 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-config-volume\") pod 
\"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.306960 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-secret-volume\") pod \"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.315579 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ccvb\" (UniqueName: \"kubernetes.io/projected/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-kube-api-access-2ccvb\") pod \"collect-profiles-29415240-j6vcn\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.472576 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:00 crc kubenswrapper[4652]: I1205 06:00:00.853980 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn"] Dec 05 06:00:01 crc kubenswrapper[4652]: I1205 06:00:01.154177 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" event={"ID":"f1c2f789-04c2-4616-a921-81d0d5a9c6bb","Type":"ContainerStarted","Data":"fe2f64774b90a19979247133f90428aa071bd48983363f35cf77bd77b37102d7"} Dec 05 06:00:01 crc kubenswrapper[4652]: I1205 06:00:01.156210 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" event={"ID":"f1c2f789-04c2-4616-a921-81d0d5a9c6bb","Type":"ContainerStarted","Data":"909af5320d039cdde28070044a9f67b88ea9d711e7c97357a2c581987472f929"} Dec 05 06:00:01 crc kubenswrapper[4652]: I1205 06:00:01.186286 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" podStartSLOduration=1.186266056 podStartE2EDuration="1.186266056s" podCreationTimestamp="2025-12-05 06:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:00:01.173949517 +0000 UTC m=+2003.410679804" watchObservedRunningTime="2025-12-05 06:00:01.186266056 +0000 UTC m=+2003.422996323" Dec 05 06:00:02 crc kubenswrapper[4652]: I1205 06:00:02.164207 4652 generic.go:334] "Generic (PLEG): container finished" podID="f1c2f789-04c2-4616-a921-81d0d5a9c6bb" containerID="fe2f64774b90a19979247133f90428aa071bd48983363f35cf77bd77b37102d7" exitCode=0 Dec 05 06:00:02 crc kubenswrapper[4652]: I1205 06:00:02.164334 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" event={"ID":"f1c2f789-04c2-4616-a921-81d0d5a9c6bb","Type":"ContainerDied","Data":"fe2f64774b90a19979247133f90428aa071bd48983363f35cf77bd77b37102d7"} Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.427160 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.579376 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-secret-volume\") pod \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.579478 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ccvb\" (UniqueName: \"kubernetes.io/projected/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-kube-api-access-2ccvb\") pod \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.579648 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-config-volume\") pod \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\" (UID: \"f1c2f789-04c2-4616-a921-81d0d5a9c6bb\") " Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.580351 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-config-volume" (OuterVolumeSpecName: "config-volume") pod "f1c2f789-04c2-4616-a921-81d0d5a9c6bb" (UID: "f1c2f789-04c2-4616-a921-81d0d5a9c6bb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.585268 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-kube-api-access-2ccvb" (OuterVolumeSpecName: "kube-api-access-2ccvb") pod "f1c2f789-04c2-4616-a921-81d0d5a9c6bb" (UID: "f1c2f789-04c2-4616-a921-81d0d5a9c6bb"). InnerVolumeSpecName "kube-api-access-2ccvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.586122 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f1c2f789-04c2-4616-a921-81d0d5a9c6bb" (UID: "f1c2f789-04c2-4616-a921-81d0d5a9c6bb"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.682247 4652 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.682280 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ccvb\" (UniqueName: \"kubernetes.io/projected/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-kube-api-access-2ccvb\") on node \"crc\" DevicePath \"\"" Dec 05 06:00:03 crc kubenswrapper[4652]: I1205 06:00:03.682289 4652 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f1c2f789-04c2-4616-a921-81d0d5a9c6bb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:00:04 crc kubenswrapper[4652]: I1205 06:00:04.178861 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" event={"ID":"f1c2f789-04c2-4616-a921-81d0d5a9c6bb","Type":"ContainerDied","Data":"909af5320d039cdde28070044a9f67b88ea9d711e7c97357a2c581987472f929"} Dec 05 06:00:04 crc kubenswrapper[4652]: I1205 06:00:04.179062 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="909af5320d039cdde28070044a9f67b88ea9d711e7c97357a2c581987472f929" Dec 05 06:00:04 crc kubenswrapper[4652]: I1205 06:00:04.178899 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn" Dec 05 06:00:04 crc kubenswrapper[4652]: I1205 06:00:04.237202 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g"] Dec 05 06:00:04 crc kubenswrapper[4652]: I1205 06:00:04.247056 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415195-7pk9g"] Dec 05 06:00:06 crc kubenswrapper[4652]: I1205 06:00:06.137627 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c118e33-8f43-4a93-9fba-c4562daa381b" path="/var/lib/kubelet/pods/9c118e33-8f43-4a93-9fba-c4562daa381b/volumes" Dec 05 06:00:41 crc kubenswrapper[4652]: I1205 06:00:41.212668 4652 scope.go:117] "RemoveContainer" containerID="fb686635f7cc30085bf6cea23a2ff2952cb2b43881aca3905b73be536d3cb588" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.144984 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415241-4bkxc"] Dec 05 06:01:00 crc kubenswrapper[4652]: E1205 06:01:00.145846 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1c2f789-04c2-4616-a921-81d0d5a9c6bb" containerName="collect-profiles" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.145859 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1c2f789-04c2-4616-a921-81d0d5a9c6bb" containerName="collect-profiles" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.146065 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1c2f789-04c2-4616-a921-81d0d5a9c6bb" containerName="collect-profiles" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.146684 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.163198 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415241-4bkxc"] Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.269500 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-config-data\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.269641 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh8bb\" (UniqueName: \"kubernetes.io/projected/35c364c2-b30a-448e-a393-ab53fa553df3-kube-api-access-kh8bb\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.269752 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-combined-ca-bundle\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.270400 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-fernet-keys\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.371939 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-fernet-keys\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.372029 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-config-data\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.372096 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh8bb\" (UniqueName: \"kubernetes.io/projected/35c364c2-b30a-448e-a393-ab53fa553df3-kube-api-access-kh8bb\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.372185 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-combined-ca-bundle\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.378120 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-fernet-keys\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.378202 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-combined-ca-bundle\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.378428 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-config-data\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.388081 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh8bb\" (UniqueName: \"kubernetes.io/projected/35c364c2-b30a-448e-a393-ab53fa553df3-kube-api-access-kh8bb\") pod \"keystone-cron-29415241-4bkxc\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.461994 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:00 crc kubenswrapper[4652]: I1205 06:01:00.854628 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415241-4bkxc"] Dec 05 06:01:01 crc kubenswrapper[4652]: I1205 06:01:01.670341 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415241-4bkxc" event={"ID":"35c364c2-b30a-448e-a393-ab53fa553df3","Type":"ContainerStarted","Data":"ed5c3663c57c197cbcdfc71ed373565e1677fdd8ff73be6ee581869933ddb643"} Dec 05 06:01:01 crc kubenswrapper[4652]: I1205 06:01:01.670653 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415241-4bkxc" event={"ID":"35c364c2-b30a-448e-a393-ab53fa553df3","Type":"ContainerStarted","Data":"a4f52b7d5dcbc1076a932fe7a1559ef1ea08efc7f0b5c0cb39096ac0abf4e9ec"} Dec 05 06:01:01 crc kubenswrapper[4652]: I1205 06:01:01.685321 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415241-4bkxc" podStartSLOduration=1.685309025 podStartE2EDuration="1.685309025s" podCreationTimestamp="2025-12-05 06:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:01:01.684482342 +0000 UTC m=+2063.921212619" watchObservedRunningTime="2025-12-05 06:01:01.685309025 +0000 UTC m=+2063.922039292" Dec 05 06:01:03 crc kubenswrapper[4652]: I1205 06:01:03.692404 4652 generic.go:334] "Generic (PLEG): container finished" podID="35c364c2-b30a-448e-a393-ab53fa553df3" containerID="ed5c3663c57c197cbcdfc71ed373565e1677fdd8ff73be6ee581869933ddb643" exitCode=0 Dec 05 06:01:03 crc kubenswrapper[4652]: I1205 06:01:03.692737 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415241-4bkxc" event={"ID":"35c364c2-b30a-448e-a393-ab53fa553df3","Type":"ContainerDied","Data":"ed5c3663c57c197cbcdfc71ed373565e1677fdd8ff73be6ee581869933ddb643"} Dec 05 06:01:04 crc kubenswrapper[4652]: 
I1205 06:01:04.969456 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.093591 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kh8bb\" (UniqueName: \"kubernetes.io/projected/35c364c2-b30a-448e-a393-ab53fa553df3-kube-api-access-kh8bb\") pod \"35c364c2-b30a-448e-a393-ab53fa553df3\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.093829 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-combined-ca-bundle\") pod \"35c364c2-b30a-448e-a393-ab53fa553df3\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.093867 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-config-data\") pod \"35c364c2-b30a-448e-a393-ab53fa553df3\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.094092 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-fernet-keys\") pod \"35c364c2-b30a-448e-a393-ab53fa553df3\" (UID: \"35c364c2-b30a-448e-a393-ab53fa553df3\") " Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.106600 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35c364c2-b30a-448e-a393-ab53fa553df3-kube-api-access-kh8bb" (OuterVolumeSpecName: "kube-api-access-kh8bb") pod "35c364c2-b30a-448e-a393-ab53fa553df3" (UID: "35c364c2-b30a-448e-a393-ab53fa553df3"). InnerVolumeSpecName "kube-api-access-kh8bb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.142739 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "35c364c2-b30a-448e-a393-ab53fa553df3" (UID: "35c364c2-b30a-448e-a393-ab53fa553df3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.199995 4652 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.200032 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kh8bb\" (UniqueName: \"kubernetes.io/projected/35c364c2-b30a-448e-a393-ab53fa553df3-kube-api-access-kh8bb\") on node \"crc\" DevicePath \"\"" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.220152 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35c364c2-b30a-448e-a393-ab53fa553df3" (UID: "35c364c2-b30a-448e-a393-ab53fa553df3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.222361 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-config-data" (OuterVolumeSpecName: "config-data") pod "35c364c2-b30a-448e-a393-ab53fa553df3" (UID: "35c364c2-b30a-448e-a393-ab53fa553df3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.303794 4652 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.304342 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35c364c2-b30a-448e-a393-ab53fa553df3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.723924 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415241-4bkxc" event={"ID":"35c364c2-b30a-448e-a393-ab53fa553df3","Type":"ContainerDied","Data":"a4f52b7d5dcbc1076a932fe7a1559ef1ea08efc7f0b5c0cb39096ac0abf4e9ec"} Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.723963 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4f52b7d5dcbc1076a932fe7a1559ef1ea08efc7f0b5c0cb39096ac0abf4e9ec" Dec 05 06:01:05 crc kubenswrapper[4652]: I1205 06:01:05.723972 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415241-4bkxc" Dec 05 06:01:14 crc kubenswrapper[4652]: I1205 06:01:14.828839 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rhln8"] Dec 05 06:01:14 crc kubenswrapper[4652]: E1205 06:01:14.830322 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35c364c2-b30a-448e-a393-ab53fa553df3" containerName="keystone-cron" Dec 05 06:01:14 crc kubenswrapper[4652]: I1205 06:01:14.830339 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="35c364c2-b30a-448e-a393-ab53fa553df3" containerName="keystone-cron" Dec 05 06:01:14 crc kubenswrapper[4652]: I1205 06:01:14.830671 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="35c364c2-b30a-448e-a393-ab53fa553df3" containerName="keystone-cron" Dec 05 06:01:14 crc kubenswrapper[4652]: I1205 06:01:14.832847 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:14 crc kubenswrapper[4652]: I1205 06:01:14.845130 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhln8"] Dec 05 06:01:14 crc kubenswrapper[4652]: I1205 06:01:14.934643 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-catalog-content\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:14 crc kubenswrapper[4652]: I1205 06:01:14.934708 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-utilities\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:14 crc kubenswrapper[4652]: I1205 06:01:14.934824 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9v5c\" (UniqueName: \"kubernetes.io/projected/d675111e-d2ad-48a3-833a-f72f531dd4c0-kube-api-access-k9v5c\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.037950 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-catalog-content\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.037999 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-utilities\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.038034 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9v5c\" (UniqueName: \"kubernetes.io/projected/d675111e-d2ad-48a3-833a-f72f531dd4c0-kube-api-access-k9v5c\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.038537 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-catalog-content\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.038629 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-utilities\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.055374 4652 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-k9v5c\" (UniqueName: \"kubernetes.io/projected/d675111e-d2ad-48a3-833a-f72f531dd4c0-kube-api-access-k9v5c\") pod \"redhat-marketplace-rhln8\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.152288 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.551340 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhln8"] Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.822658 4652 generic.go:334] "Generic (PLEG): container finished" podID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerID="b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5" exitCode=0 Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.822780 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhln8" event={"ID":"d675111e-d2ad-48a3-833a-f72f531dd4c0","Type":"ContainerDied","Data":"b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5"} Dec 05 06:01:15 crc kubenswrapper[4652]: I1205 06:01:15.823082 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhln8" event={"ID":"d675111e-d2ad-48a3-833a-f72f531dd4c0","Type":"ContainerStarted","Data":"b1875bdf2c8514415bde4aee4d9645cdd5323f9864137213ff7885b8621bb31d"} Dec 05 06:01:16 crc kubenswrapper[4652]: I1205 06:01:16.838325 4652 generic.go:334] "Generic (PLEG): container finished" podID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerID="99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778" exitCode=0 Dec 05 06:01:16 crc kubenswrapper[4652]: I1205 06:01:16.838664 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhln8" event={"ID":"d675111e-d2ad-48a3-833a-f72f531dd4c0","Type":"ContainerDied","Data":"99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778"} Dec 05 06:01:17 crc kubenswrapper[4652]: I1205 06:01:17.851523 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhln8" event={"ID":"d675111e-d2ad-48a3-833a-f72f531dd4c0","Type":"ContainerStarted","Data":"0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64"} Dec 05 06:01:17 crc kubenswrapper[4652]: I1205 06:01:17.869238 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rhln8" podStartSLOduration=2.390389807 podStartE2EDuration="3.869218593s" podCreationTimestamp="2025-12-05 06:01:14 +0000 UTC" firstStartedPulling="2025-12-05 06:01:15.824600802 +0000 UTC m=+2078.061331069" lastFinishedPulling="2025-12-05 06:01:17.303429588 +0000 UTC m=+2079.540159855" observedRunningTime="2025-12-05 06:01:17.866104328 +0000 UTC m=+2080.102834595" watchObservedRunningTime="2025-12-05 06:01:17.869218593 +0000 UTC m=+2080.105948850" Dec 05 06:01:25 crc kubenswrapper[4652]: I1205 06:01:25.153143 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:25 crc kubenswrapper[4652]: I1205 06:01:25.153872 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:25 crc kubenswrapper[4652]: I1205 06:01:25.191469 4652 kubelet.go:2542] "SyncLoop 
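The marketplace pod follows the OLM catalog-source pattern: two short-lived content-extraction containers exit 0, then registry-server starts and is gated by a startup probe (first "unhealthy", then "started") before the readiness probe reports "ready" below. A sketch of such a startup/readiness probe pair follows; the exec command and port are assumptions typical of OLM registry pods, not taken from this log, which never shows the pod's manifest.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// Sketch: a startup/readiness probe pair like the ones driving the probe
// status transitions above. Command, port, and thresholds are assumptions.
func main() {
	handler := corev1.ProbeHandler{
		Exec: &corev1.ExecAction{
			Command: []string{"grpc_health_probe", "-addr=:50051"}, // assumption
		},
	}
	startup := corev1.Probe{ProbeHandler: handler, PeriodSeconds: 10, FailureThreshold: 15}
	readiness := corev1.Probe{ProbeHandler: handler, PeriodSeconds: 10}
	fmt.Printf("startup=%+v readiness=%+v\n", startup, readiness)
}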
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:25 crc kubenswrapper[4652]: I1205 06:01:25.971347 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:26 crc kubenswrapper[4652]: I1205 06:01:26.023924 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhln8"] Dec 05 06:01:27 crc kubenswrapper[4652]: I1205 06:01:27.943430 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rhln8" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerName="registry-server" containerID="cri-o://0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64" gracePeriod=2 Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.345696 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rhln8" Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.369519 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-utilities\") pod \"d675111e-d2ad-48a3-833a-f72f531dd4c0\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.369845 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-catalog-content\") pod \"d675111e-d2ad-48a3-833a-f72f531dd4c0\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.370094 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9v5c\" (UniqueName: \"kubernetes.io/projected/d675111e-d2ad-48a3-833a-f72f531dd4c0-kube-api-access-k9v5c\") pod \"d675111e-d2ad-48a3-833a-f72f531dd4c0\" (UID: \"d675111e-d2ad-48a3-833a-f72f531dd4c0\") " Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.371394 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-utilities" (OuterVolumeSpecName: "utilities") pod "d675111e-d2ad-48a3-833a-f72f531dd4c0" (UID: "d675111e-d2ad-48a3-833a-f72f531dd4c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.378890 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d675111e-d2ad-48a3-833a-f72f531dd4c0-kube-api-access-k9v5c" (OuterVolumeSpecName: "kube-api-access-k9v5c") pod "d675111e-d2ad-48a3-833a-f72f531dd4c0" (UID: "d675111e-d2ad-48a3-833a-f72f531dd4c0"). InnerVolumeSpecName "kube-api-access-k9v5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.393769 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d675111e-d2ad-48a3-833a-f72f531dd4c0" (UID: "d675111e-d2ad-48a3-833a-f72f531dd4c0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.474222 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.474262 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d675111e-d2ad-48a3-833a-f72f531dd4c0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.474276 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9v5c\" (UniqueName: \"kubernetes.io/projected/d675111e-d2ad-48a3-833a-f72f531dd4c0-kube-api-access-k9v5c\") on node \"crc\" DevicePath \"\"" Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.957047 4652 generic.go:334] "Generic (PLEG): container finished" podID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerID="0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64" exitCode=0 Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.957364 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhln8" event={"ID":"d675111e-d2ad-48a3-833a-f72f531dd4c0","Type":"ContainerDied","Data":"0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64"} Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.957403 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rhln8" event={"ID":"d675111e-d2ad-48a3-833a-f72f531dd4c0","Type":"ContainerDied","Data":"b1875bdf2c8514415bde4aee4d9645cdd5323f9864137213ff7885b8621bb31d"} Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.957424 4652 scope.go:117] "RemoveContainer" containerID="0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64" Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.957617 4652 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.991792 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhln8"]
Dec 05 06:01:28 crc kubenswrapper[4652]: I1205 06:01:28.998109 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rhln8"]
Dec 05 06:01:29 crc kubenswrapper[4652]: I1205 06:01:29.006038 4652 scope.go:117] "RemoveContainer" containerID="99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778"
Dec 05 06:01:29 crc kubenswrapper[4652]: I1205 06:01:29.035523 4652 scope.go:117] "RemoveContainer" containerID="b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5"
Dec 05 06:01:29 crc kubenswrapper[4652]: I1205 06:01:29.064021 4652 scope.go:117] "RemoveContainer" containerID="0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64"
Dec 05 06:01:29 crc kubenswrapper[4652]: E1205 06:01:29.064341 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64\": container with ID starting with 0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64 not found: ID does not exist" containerID="0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64"
Dec 05 06:01:29 crc kubenswrapper[4652]: I1205 06:01:29.064377 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64"} err="failed to get container status \"0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64\": rpc error: code = NotFound desc = could not find container \"0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64\": container with ID starting with 0f845c3003763a540a7f9fc7673e58203e5f412601d37358108954a2b6697e64 not found: ID does not exist"
Dec 05 06:01:29 crc kubenswrapper[4652]: I1205 06:01:29.064399 4652 scope.go:117] "RemoveContainer" containerID="99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778"
Dec 05 06:01:29 crc kubenswrapper[4652]: E1205 06:01:29.064765 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778\": container with ID starting with 99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778 not found: ID does not exist" containerID="99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778"
Dec 05 06:01:29 crc kubenswrapper[4652]: I1205 06:01:29.064818 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778"} err="failed to get container status \"99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778\": rpc error: code = NotFound desc = could not find container \"99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778\": container with ID starting with 99f4600ff0af42ba0a91212a42f64d65dbfa9a0d0410aefe3510863a9f926778 not found: ID does not exist"
Dec 05 06:01:29 crc kubenswrapper[4652]: I1205 06:01:29.064852 4652 scope.go:117] "RemoveContainer" containerID="b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5"
Dec 05 06:01:29 crc kubenswrapper[4652]: E1205 06:01:29.065186 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5\": container with ID starting with b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5 not found: ID does not exist" containerID="b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5"
Dec 05 06:01:29 crc kubenswrapper[4652]: I1205 06:01:29.065216 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5"} err="failed to get container status \"b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5\": rpc error: code = NotFound desc = could not find container \"b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5\": container with ID starting with b8933449005dafacee3d18e2fbbb38ca34dd34ba8c68b9ac1c2ac23b0f241ac5 not found: ID does not exist"
Dec 05 06:01:30 crc kubenswrapper[4652]: I1205 06:01:30.137816 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" path="/var/lib/kubelet/pods/d675111e-d2ad-48a3-833a-f72f531dd4c0/volumes"
Dec 05 06:01:34 crc kubenswrapper[4652]: I1205 06:01:34.150608 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 06:01:34 crc kubenswrapper[4652]: I1205 06:01:34.150978 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:02:04 crc kubenswrapper[4652]: I1205 06:02:04.150973 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 06:02:04 crc kubenswrapper[4652]: I1205 06:02:04.151639 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.150809 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.151373 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.151415 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24"
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.151950 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.152003 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" gracePeriod=600 Dec 05 06:02:34 crc kubenswrapper[4652]: E1205 06:02:34.268012 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.524537 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" exitCode=0 Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.524612 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"} Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.524687 4652 scope.go:117] "RemoveContainer" containerID="e91379ac0dd46adf59e35fc8b2698ec2cbbe53e58b4afec1ae42fc455feb90c9" Dec 05 06:02:34 crc kubenswrapper[4652]: I1205 06:02:34.525435 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:02:34 crc kubenswrapper[4652]: E1205 06:02:34.525850 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:02:38 crc kubenswrapper[4652]: I1205 06:02:38.561509 4652 generic.go:334] "Generic (PLEG): container finished" podID="03326baf-b566-4e1d-a8e6-07bb1c1535ad" containerID="cbf426f6d17582030b3bdb4698b9d3fb0b3350a23b9d980a63f5b0612939418c" exitCode=0 Dec 05 06:02:38 crc kubenswrapper[4652]: I1205 06:02:38.561586 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" event={"ID":"03326baf-b566-4e1d-a8e6-07bb1c1535ad","Type":"ContainerDied","Data":"cbf426f6d17582030b3bdb4698b9d3fb0b3350a23b9d980a63f5b0612939418c"} Dec 05 06:02:39 crc kubenswrapper[4652]: I1205 06:02:39.944153 4652 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.055149 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdzxk\" (UniqueName: \"kubernetes.io/projected/03326baf-b566-4e1d-a8e6-07bb1c1535ad-kube-api-access-gdzxk\") pod \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.055213 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-combined-ca-bundle\") pod \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.055248 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-ssh-key\") pod \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.055316 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-secret-0\") pod \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.055343 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-inventory\") pod \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\" (UID: \"03326baf-b566-4e1d-a8e6-07bb1c1535ad\") " Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.060448 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "03326baf-b566-4e1d-a8e6-07bb1c1535ad" (UID: "03326baf-b566-4e1d-a8e6-07bb1c1535ad"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.061077 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03326baf-b566-4e1d-a8e6-07bb1c1535ad-kube-api-access-gdzxk" (OuterVolumeSpecName: "kube-api-access-gdzxk") pod "03326baf-b566-4e1d-a8e6-07bb1c1535ad" (UID: "03326baf-b566-4e1d-a8e6-07bb1c1535ad"). InnerVolumeSpecName "kube-api-access-gdzxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.078979 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "03326baf-b566-4e1d-a8e6-07bb1c1535ad" (UID: "03326baf-b566-4e1d-a8e6-07bb1c1535ad"). InnerVolumeSpecName "ssh-key". 
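The repeating "back-off 5m0s" errors around the machine-config-daemon restarts reflect the kubelet's per-container restart backoff: the delay grows per crash until it hits a cap, which is the 5m0s in these messages. A minimal reimplementation of that schedule (the initial 10s value is an assumption for the sketch; the 5m cap comes from the log text):

```go
// Toy version of kubelet's CrashLoopBackOff schedule: delay doubles per
// restart and is clamped at 5 minutes, matching "back-off 5m0s" above.
package main

import (
	"fmt"
	"time"
)

func backoff(restarts int) time.Duration {
	d := 10 * time.Second // assumed initial container-restart delay
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute // the cap seen in the log messages
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %v\n", r, backoff(r))
	}
	// Prints 10s, 20s, 40s, 1m20s, 2m40s, then 5m0s from the cap onward.
}
```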
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.079778 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-inventory" (OuterVolumeSpecName: "inventory") pod "03326baf-b566-4e1d-a8e6-07bb1c1535ad" (UID: "03326baf-b566-4e1d-a8e6-07bb1c1535ad"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.080196 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "03326baf-b566-4e1d-a8e6-07bb1c1535ad" (UID: "03326baf-b566-4e1d-a8e6-07bb1c1535ad"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.158362 4652 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.158398 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.158416 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdzxk\" (UniqueName: \"kubernetes.io/projected/03326baf-b566-4e1d-a8e6-07bb1c1535ad-kube-api-access-gdzxk\") on node \"crc\" DevicePath \"\"" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.158432 4652 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.158442 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03326baf-b566-4e1d-a8e6-07bb1c1535ad-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.580039 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" event={"ID":"03326baf-b566-4e1d-a8e6-07bb1c1535ad","Type":"ContainerDied","Data":"37f3e66be18432c1ec5047abb61528b30d780d18895dfd5dd6cd162e3a40ff46"} Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.580085 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37f3e66be18432c1ec5047abb61528b30d780d18895dfd5dd6cd162e3a40ff46" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.580151 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-9q65b" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.644659 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"] Dec 05 06:02:40 crc kubenswrapper[4652]: E1205 06:02:40.645194 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerName="extract-utilities" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.645283 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerName="extract-utilities" Dec 05 06:02:40 crc kubenswrapper[4652]: E1205 06:02:40.645347 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03326baf-b566-4e1d-a8e6-07bb1c1535ad" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.645395 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="03326baf-b566-4e1d-a8e6-07bb1c1535ad" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 06:02:40 crc kubenswrapper[4652]: E1205 06:02:40.645444 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerName="extract-content" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.645501 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerName="extract-content" Dec 05 06:02:40 crc kubenswrapper[4652]: E1205 06:02:40.645604 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerName="registry-server" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.645657 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerName="registry-server" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.645902 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="d675111e-d2ad-48a3-833a-f72f531dd4c0" containerName="registry-server" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.645967 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="03326baf-b566-4e1d-a8e6-07bb1c1535ad" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.646676 4652 util.go:30] "No sandbox for pod can be found. 
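The RemoveStaleState entries above show the CPU and memory managers dropping resource assignments for containers whose pods (the deleted marketplace and libvirt-edpm pods) no longer exist, before admitting the new nova-edpm pod. A toy version of that reconciliation over a state map keyed by pod UID and container name (UIDs taken from the log; the stored values are invented placeholders):

```go
// Sketch of stale-state cleanup: walk the assignment map and delete any
// entry whose pod is no longer in the active set.
package main

import "fmt"

type key struct{ podUID, container string }

func removeStale(state map[key]string, activePods map[string]bool) {
	for k := range state { // deleting during range is safe in Go
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %s\n", k.container, k.podUID)
			delete(state, k)
		}
	}
}

func main() {
	state := map[key]string{
		{"d675111e-d2ad-48a3-833a-f72f531dd4c0", "registry-server"}: "cpuset 0-1", // stale
		{"965041ce-55b9-4c74-b51d-0628c7d13ac9", "nova-edpm"}:       "cpuset 2-3", // active
	}
	active := map[string]bool{"965041ce-55b9-4c74-b51d-0628c7d13ac9": true}
	removeStale(state, active)
	fmt.Println("remaining assignments:", len(state))
}
```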
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.649278 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.649494 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.649498 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.649776 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.649804 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.650074 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.650197 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.657809 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"]
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.669706 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.669799 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.669866 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwrpx\" (UniqueName: \"kubernetes.io/projected/965041ce-55b9-4c74-b51d-0628c7d13ac9-kube-api-access-fwrpx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.669903 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.669924 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.669970 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.670002 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.670175 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.670198 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771637 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771686 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771730 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771772 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771810 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwrpx\" (UniqueName: \"kubernetes.io/projected/965041ce-55b9-4c74-b51d-0628c7d13ac9-kube-api-access-fwrpx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771836 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771855 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771883 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.771910 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.772906 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.775944 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.776065 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.776425 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.776461 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.776695 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.777174 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.780015 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.791402 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwrpx\" (UniqueName: \"kubernetes.io/projected/965041ce-55b9-4c74-b51d-0628c7d13ac9-kube-api-access-fwrpx\") pod \"nova-edpm-deployment-openstack-edpm-ipam-9c46b\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:40 crc kubenswrapper[4652]: I1205 06:02:40.961397 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"
Dec 05 06:02:41 crc kubenswrapper[4652]: I1205 06:02:41.425927 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b"]
Dec 05 06:02:41 crc kubenswrapper[4652]: I1205 06:02:41.431512 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 06:02:41 crc kubenswrapper[4652]: I1205 06:02:41.591469 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b" event={"ID":"965041ce-55b9-4c74-b51d-0628c7d13ac9","Type":"ContainerStarted","Data":"0792397992e237c03f6a0ae4fa6b31c3c5f77914d4cd3bb0283c959d4b023200"}
Dec 05 06:02:42 crc kubenswrapper[4652]: I1205 06:02:42.603960 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b" event={"ID":"965041ce-55b9-4c74-b51d-0628c7d13ac9","Type":"ContainerStarted","Data":"7c100b24304905c9a334855208a2ba451a664854d8dc1bbdbd1cb6108e6a16be"}
Dec 05 06:02:42 crc kubenswrapper[4652]: I1205 06:02:42.625074 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b" podStartSLOduration=2.128882905 podStartE2EDuration="2.625053007s" podCreationTimestamp="2025-12-05 06:02:40 +0000 UTC" firstStartedPulling="2025-12-05 06:02:41.43121961 +0000 UTC m=+2163.667949876" lastFinishedPulling="2025-12-05 06:02:41.927389711 +0000 UTC m=+2164.164119978" observedRunningTime="2025-12-05 06:02:42.619940705 +0000 UTC m=+2164.856670973" watchObservedRunningTime="2025-12-05 06:02:42.625053007 +0000 UTC m=+2164.861783265"
Dec 05 06:02:46 crc kubenswrapper[4652]: I1205 06:02:46.126745 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"
Dec 05 06:02:46 crc kubenswrapper[4652]: E1205 06:02:46.127579 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"
Dec 05 06:02:59 crc kubenswrapper[4652]: I1205 06:02:59.126197 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"
Dec 05 06:02:59 crc kubenswrapper[4652]: E1205 06:02:59.126986 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"
Dec 05 06:03:11 crc kubenswrapper[4652]: I1205 06:03:11.126320 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"
Dec 05 06:03:11 crc kubenswrapper[4652]: E1205 06:03:11.127096 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"
Dec 05 06:03:22 crc kubenswrapper[4652]: I1205 06:03:22.125754 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"
Dec 05 06:03:22 crc kubenswrapper[4652]: E1205 06:03:22.126347 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"
Dec 05 06:03:36 crc kubenswrapper[4652]: I1205 06:03:36.127315 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"
Dec 05 06:03:36 crc kubenswrapper[4652]: E1205 06:03:36.127963 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"
Dec 05 06:03:50 crc kubenswrapper[4652]: I1205 06:03:50.125852 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"
Dec 05 06:03:50 crc kubenswrapper[4652]: E1205 06:03:50.126429 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.127105 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454"
Dec 05 06:04:02 crc kubenswrapper[4652]: E1205 06:04:02.127845 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.354173 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wll9c"]
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.357315 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wll9c"
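The startup-latency tracker entry for the nova-edpm pod above encodes a simple subtraction: the SLO duration is the end-to-end startup time minus the time spent pulling images, so slow registries do not count against the startup SLO. Reproducing its numbers with the timestamps copied from the log:

```go
// podStartSLOduration = (watch-observed running - creation) - image pull time.
package main

import (
	"fmt"
	"time"
)

func main() {
	created, _ := time.Parse(time.RFC3339Nano, "2025-12-05T06:02:40Z")
	firstPull, _ := time.Parse(time.RFC3339Nano, "2025-12-05T06:02:41.43121961Z")
	lastPull, _ := time.Parse(time.RFC3339Nano, "2025-12-05T06:02:41.927389711Z")
	running, _ := time.Parse(time.RFC3339Nano, "2025-12-05T06:02:42.625053007Z")

	e2e := running.Sub(created)
	slo := e2e - lastPull.Sub(firstPull) // exclude image-pull time
	fmt.Println("podStartE2EDuration:", e2e) // 2.625053007s, as logged
	fmt.Println("podStartSLOduration:", slo) // ~2.128882906s vs logged 2.128882905
}
```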
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.367294 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wll9c"]
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.454783 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96pkc\" (UniqueName: \"kubernetes.io/projected/c1c64a98-a7ed-4113-9a15-aec195a223b4-kube-api-access-96pkc\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.454877 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-catalog-content\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.454915 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-utilities\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.557117 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96pkc\" (UniqueName: \"kubernetes.io/projected/c1c64a98-a7ed-4113-9a15-aec195a223b4-kube-api-access-96pkc\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.557201 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-catalog-content\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.557234 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-utilities\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.557752 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-utilities\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.557749 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-catalog-content\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.573598 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96pkc\" (UniqueName: \"kubernetes.io/projected/c1c64a98-a7ed-4113-9a15-aec195a223b4-kube-api-access-96pkc\") pod \"certified-operators-wll9c\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:02 crc kubenswrapper[4652]: I1205 06:04:02.679762 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wll9c"
Dec 05 06:04:03 crc kubenswrapper[4652]: I1205 06:04:03.125042 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wll9c"]
Dec 05 06:04:03 crc kubenswrapper[4652]: I1205 06:04:03.231080 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wll9c" event={"ID":"c1c64a98-a7ed-4113-9a15-aec195a223b4","Type":"ContainerStarted","Data":"48e5c8190fa1279eed99d863f3531c7a55e1d41cf10fffd324c561821bb82ca2"}
Dec 05 06:04:04 crc kubenswrapper[4652]: I1205 06:04:04.240082 4652 generic.go:334] "Generic (PLEG): container finished" podID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerID="dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0" exitCode=0
Dec 05 06:04:04 crc kubenswrapper[4652]: I1205 06:04:04.240142 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wll9c" event={"ID":"c1c64a98-a7ed-4113-9a15-aec195a223b4","Type":"ContainerDied","Data":"dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0"}
Dec 05 06:04:05 crc kubenswrapper[4652]: I1205 06:04:05.250086 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wll9c" event={"ID":"c1c64a98-a7ed-4113-9a15-aec195a223b4","Type":"ContainerStarted","Data":"f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27"}
Dec 05 06:04:06 crc kubenswrapper[4652]: I1205 06:04:06.258009 4652 generic.go:334] "Generic (PLEG): container finished" podID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerID="f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27" exitCode=0
Dec 05 06:04:06 crc kubenswrapper[4652]: I1205 06:04:06.258098 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wll9c" event={"ID":"c1c64a98-a7ed-4113-9a15-aec195a223b4","Type":"ContainerDied","Data":"f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27"}
Dec 05 06:04:07 crc kubenswrapper[4652]: I1205 06:04:07.287938 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wll9c" event={"ID":"c1c64a98-a7ed-4113-9a15-aec195a223b4","Type":"ContainerStarted","Data":"80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27"}
Dec 05 06:04:07 crc kubenswrapper[4652]: I1205 06:04:07.305984 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wll9c" podStartSLOduration=2.773294115 podStartE2EDuration="5.305963291s" podCreationTimestamp="2025-12-05 06:04:02 +0000 UTC" firstStartedPulling="2025-12-05 06:04:04.241720818 +0000 UTC m=+2246.478451085" lastFinishedPulling="2025-12-05 06:04:06.774389994 +0000 UTC m=+2249.011120261" observedRunningTime="2025-12-05 06:04:07.300816835 +0000 UTC m=+2249.537547101" watchObservedRunningTime="2025-12-05 06:04:07.305963291 +0000 UTC m=+2249.542693558"
Dec 05 06:04:12 crc kubenswrapper[4652]: I1205 06:04:12.680528 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wll9c"
status="" pod="openshift-marketplace/certified-operators-wll9c" Dec 05 06:04:12 crc kubenswrapper[4652]: I1205 06:04:12.681215 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wll9c" Dec 05 06:04:12 crc kubenswrapper[4652]: I1205 06:04:12.714899 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wll9c" Dec 05 06:04:13 crc kubenswrapper[4652]: I1205 06:04:13.377152 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wll9c" Dec 05 06:04:13 crc kubenswrapper[4652]: I1205 06:04:13.422515 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wll9c"] Dec 05 06:04:14 crc kubenswrapper[4652]: I1205 06:04:14.125691 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:04:14 crc kubenswrapper[4652]: E1205 06:04:14.125959 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.358882 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wll9c" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerName="registry-server" containerID="cri-o://80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27" gracePeriod=2 Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.754318 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wll9c" Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.842574 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-catalog-content\") pod \"c1c64a98-a7ed-4113-9a15-aec195a223b4\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.842675 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96pkc\" (UniqueName: \"kubernetes.io/projected/c1c64a98-a7ed-4113-9a15-aec195a223b4-kube-api-access-96pkc\") pod \"c1c64a98-a7ed-4113-9a15-aec195a223b4\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.842752 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-utilities\") pod \"c1c64a98-a7ed-4113-9a15-aec195a223b4\" (UID: \"c1c64a98-a7ed-4113-9a15-aec195a223b4\") " Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.843493 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-utilities" (OuterVolumeSpecName: "utilities") pod "c1c64a98-a7ed-4113-9a15-aec195a223b4" (UID: "c1c64a98-a7ed-4113-9a15-aec195a223b4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.844233 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.859228 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1c64a98-a7ed-4113-9a15-aec195a223b4-kube-api-access-96pkc" (OuterVolumeSpecName: "kube-api-access-96pkc") pod "c1c64a98-a7ed-4113-9a15-aec195a223b4" (UID: "c1c64a98-a7ed-4113-9a15-aec195a223b4"). InnerVolumeSpecName "kube-api-access-96pkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.883027 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1c64a98-a7ed-4113-9a15-aec195a223b4" (UID: "c1c64a98-a7ed-4113-9a15-aec195a223b4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.946012 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1c64a98-a7ed-4113-9a15-aec195a223b4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:15 crc kubenswrapper[4652]: I1205 06:04:15.946365 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96pkc\" (UniqueName: \"kubernetes.io/projected/c1c64a98-a7ed-4113-9a15-aec195a223b4-kube-api-access-96pkc\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.370603 4652 generic.go:334] "Generic (PLEG): container finished" podID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerID="80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27" exitCode=0 Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.370804 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wll9c" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.370847 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wll9c" event={"ID":"c1c64a98-a7ed-4113-9a15-aec195a223b4","Type":"ContainerDied","Data":"80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27"} Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.371452 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wll9c" event={"ID":"c1c64a98-a7ed-4113-9a15-aec195a223b4","Type":"ContainerDied","Data":"48e5c8190fa1279eed99d863f3531c7a55e1d41cf10fffd324c561821bb82ca2"} Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.371515 4652 scope.go:117] "RemoveContainer" containerID="80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.392440 4652 scope.go:117] "RemoveContainer" containerID="f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.394484 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wll9c"] Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.402662 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wll9c"] Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.408502 4652 scope.go:117] "RemoveContainer" containerID="dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.449670 4652 scope.go:117] "RemoveContainer" containerID="80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27" Dec 05 06:04:16 crc kubenswrapper[4652]: E1205 06:04:16.450056 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27\": container with ID starting with 80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27 not found: ID does not exist" containerID="80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.450092 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27"} err="failed to get container status \"80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27\": rpc error: code = NotFound desc = could not find container \"80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27\": container with ID starting with 80291b41ddcc431926608765434b40f870e37cad423c529a634f332a16c26f27 not found: ID does not exist" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.450117 4652 scope.go:117] "RemoveContainer" containerID="f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27" Dec 05 06:04:16 crc kubenswrapper[4652]: E1205 06:04:16.450331 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27\": container with ID starting with f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27 not found: ID does not exist" containerID="f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.450357 4652 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27"} err="failed to get container status \"f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27\": rpc error: code = NotFound desc = could not find container \"f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27\": container with ID starting with f8979c8aadb0301f5d7741688e4f4eaa1ddb224c865cdd7743cb2a235f45ff27 not found: ID does not exist" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.450370 4652 scope.go:117] "RemoveContainer" containerID="dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0" Dec 05 06:04:16 crc kubenswrapper[4652]: E1205 06:04:16.450596 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0\": container with ID starting with dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0 not found: ID does not exist" containerID="dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0" Dec 05 06:04:16 crc kubenswrapper[4652]: I1205 06:04:16.450623 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0"} err="failed to get container status \"dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0\": rpc error: code = NotFound desc = could not find container \"dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0\": container with ID starting with dc97621cccb7b59846f1886eec378e264331e7a0c3e047c47ebff3cbd0a1bbe0 not found: ID does not exist" Dec 05 06:04:18 crc kubenswrapper[4652]: I1205 06:04:18.135365 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" path="/var/lib/kubelet/pods/c1c64a98-a7ed-4113-9a15-aec195a223b4/volumes" Dec 05 06:04:29 crc kubenswrapper[4652]: I1205 06:04:29.125244 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:04:29 crc kubenswrapper[4652]: E1205 06:04:29.125900 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:04:34 crc kubenswrapper[4652]: I1205 06:04:34.519352 4652 generic.go:334] "Generic (PLEG): container finished" podID="965041ce-55b9-4c74-b51d-0628c7d13ac9" containerID="7c100b24304905c9a334855208a2ba451a664854d8dc1bbdbd1cb6108e6a16be" exitCode=0 Dec 05 06:04:34 crc kubenswrapper[4652]: I1205 06:04:34.519428 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b" event={"ID":"965041ce-55b9-4c74-b51d-0628c7d13ac9","Type":"ContainerDied","Data":"7c100b24304905c9a334855208a2ba451a664854d8dc1bbdbd1cb6108e6a16be"} Dec 05 06:04:35 crc kubenswrapper[4652]: I1205 06:04:35.867494 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049044 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-combined-ca-bundle\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049107 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwrpx\" (UniqueName: \"kubernetes.io/projected/965041ce-55b9-4c74-b51d-0628c7d13ac9-kube-api-access-fwrpx\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049194 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-0\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049220 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-inventory\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049257 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-1\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049283 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-extra-config-0\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049329 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-ssh-key\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049354 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-1\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.049375 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-0\") pod \"965041ce-55b9-4c74-b51d-0628c7d13ac9\" (UID: \"965041ce-55b9-4c74-b51d-0628c7d13ac9\") " Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.055971 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/965041ce-55b9-4c74-b51d-0628c7d13ac9-kube-api-access-fwrpx" (OuterVolumeSpecName: "kube-api-access-fwrpx") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "kube-api-access-fwrpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.056153 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.075361 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.079059 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.079891 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.080290 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.080963 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-inventory" (OuterVolumeSpecName: "inventory") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.081652 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.084861 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "965041ce-55b9-4c74-b51d-0628c7d13ac9" (UID: "965041ce-55b9-4c74-b51d-0628c7d13ac9"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152179 4652 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152208 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwrpx\" (UniqueName: \"kubernetes.io/projected/965041ce-55b9-4c74-b51d-0628c7d13ac9-kube-api-access-fwrpx\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152218 4652 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152228 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152237 4652 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152246 4652 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152253 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152262 4652 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.152269 4652 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/965041ce-55b9-4c74-b51d-0628c7d13ac9-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.537669 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b" event={"ID":"965041ce-55b9-4c74-b51d-0628c7d13ac9","Type":"ContainerDied","Data":"0792397992e237c03f6a0ae4fa6b31c3c5f77914d4cd3bb0283c959d4b023200"} Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.537959 4652 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="0792397992e237c03f6a0ae4fa6b31c3c5f77914d4cd3bb0283c959d4b023200" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.537729 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-9c46b" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.621112 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc"] Dec 05 06:04:36 crc kubenswrapper[4652]: E1205 06:04:36.621501 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="965041ce-55b9-4c74-b51d-0628c7d13ac9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.621522 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="965041ce-55b9-4c74-b51d-0628c7d13ac9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 06:04:36 crc kubenswrapper[4652]: E1205 06:04:36.621569 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerName="extract-utilities" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.621576 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerName="extract-utilities" Dec 05 06:04:36 crc kubenswrapper[4652]: E1205 06:04:36.621585 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerName="extract-content" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.621592 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerName="extract-content" Dec 05 06:04:36 crc kubenswrapper[4652]: E1205 06:04:36.621628 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerName="registry-server" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.621634 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerName="registry-server" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.621822 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="965041ce-55b9-4c74-b51d-0628c7d13ac9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.621844 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1c64a98-a7ed-4113-9a15-aec195a223b4" containerName="registry-server" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.622450 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.628179 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-cg9vq" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.628355 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.628906 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.629052 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.629151 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.638204 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc"] Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.764329 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.764371 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.764403 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-284mg\" (UniqueName: \"kubernetes.io/projected/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-kube-api-access-284mg\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.765178 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.765315 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.765521 4652 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.765642 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.866907 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.866944 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.866990 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.867010 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.867035 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-284mg\" (UniqueName: \"kubernetes.io/projected/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-kube-api-access-284mg\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.867064 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: 
\"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.867103 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.871162 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.871274 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.871292 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.871761 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.872500 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.872682 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.884135 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-284mg\" (UniqueName: \"kubernetes.io/projected/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-kube-api-access-284mg\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc\" (UID: 
\"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:36 crc kubenswrapper[4652]: I1205 06:04:36.941378 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:04:37 crc kubenswrapper[4652]: I1205 06:04:37.414774 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc"] Dec 05 06:04:37 crc kubenswrapper[4652]: I1205 06:04:37.549928 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" event={"ID":"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c","Type":"ContainerStarted","Data":"f12e76b3a1758c520b5ee4f52702dbf4264d3ad14d93bc2eb0d629caf5988442"} Dec 05 06:04:38 crc kubenswrapper[4652]: I1205 06:04:38.561404 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" event={"ID":"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c","Type":"ContainerStarted","Data":"4bff42a183e1bd851934308be4669dbc30bee9040d08331680c87075693cbf1c"} Dec 05 06:04:38 crc kubenswrapper[4652]: I1205 06:04:38.580323 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" podStartSLOduration=2.070852361 podStartE2EDuration="2.580305903s" podCreationTimestamp="2025-12-05 06:04:36 +0000 UTC" firstStartedPulling="2025-12-05 06:04:37.421034811 +0000 UTC m=+2279.657765077" lastFinishedPulling="2025-12-05 06:04:37.930488352 +0000 UTC m=+2280.167218619" observedRunningTime="2025-12-05 06:04:38.575152175 +0000 UTC m=+2280.811882442" watchObservedRunningTime="2025-12-05 06:04:38.580305903 +0000 UTC m=+2280.817036170" Dec 05 06:04:41 crc kubenswrapper[4652]: I1205 06:04:41.995976 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nq84q"] Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.006354 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.018934 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nq84q"] Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.126393 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:04:42 crc kubenswrapper[4652]: E1205 06:04:42.126732 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.179426 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qgdwf"] Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.181629 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.189287 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qgdwf"] Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.193486 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-catalog-content\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.193584 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77lpt\" (UniqueName: \"kubernetes.io/projected/e7a51abf-533c-4ed3-8506-7902eccf35d6-kube-api-access-77lpt\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.194150 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-utilities\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.297004 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b14b958-34f5-4254-8dc0-9666617dfd56-utilities\") pod \"community-operators-qgdwf\" (UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.297125 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b14b958-34f5-4254-8dc0-9666617dfd56-catalog-content\") pod \"community-operators-qgdwf\" (UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.297160 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-catalog-content\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.297206 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77lpt\" (UniqueName: \"kubernetes.io/projected/e7a51abf-533c-4ed3-8506-7902eccf35d6-kube-api-access-77lpt\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.297306 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kph2\" (UniqueName: \"kubernetes.io/projected/3b14b958-34f5-4254-8dc0-9666617dfd56-kube-api-access-8kph2\") pod \"community-operators-qgdwf\" (UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc 
kubenswrapper[4652]: I1205 06:04:42.297339 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-utilities\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.298187 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-catalog-content\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.298286 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-utilities\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.315897 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77lpt\" (UniqueName: \"kubernetes.io/projected/e7a51abf-533c-4ed3-8506-7902eccf35d6-kube-api-access-77lpt\") pod \"redhat-operators-nq84q\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.328836 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.399252 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kph2\" (UniqueName: \"kubernetes.io/projected/3b14b958-34f5-4254-8dc0-9666617dfd56-kube-api-access-8kph2\") pod \"community-operators-qgdwf\" (UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.399359 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b14b958-34f5-4254-8dc0-9666617dfd56-utilities\") pod \"community-operators-qgdwf\" (UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.399402 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b14b958-34f5-4254-8dc0-9666617dfd56-catalog-content\") pod \"community-operators-qgdwf\" (UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.399828 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b14b958-34f5-4254-8dc0-9666617dfd56-utilities\") pod \"community-operators-qgdwf\" (UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.399842 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b14b958-34f5-4254-8dc0-9666617dfd56-catalog-content\") pod \"community-operators-qgdwf\" 
(UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.417864 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kph2\" (UniqueName: \"kubernetes.io/projected/3b14b958-34f5-4254-8dc0-9666617dfd56-kube-api-access-8kph2\") pod \"community-operators-qgdwf\" (UID: \"3b14b958-34f5-4254-8dc0-9666617dfd56\") " pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.494269 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.798108 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nq84q"] Dec 05 06:04:42 crc kubenswrapper[4652]: W1205 06:04:42.802643 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7a51abf_533c_4ed3_8506_7902eccf35d6.slice/crio-fbb0c542aa061d1bf2bc16f316f18873736518d6e2b25f252737a8dbd0f65b84 WatchSource:0}: Error finding container fbb0c542aa061d1bf2bc16f316f18873736518d6e2b25f252737a8dbd0f65b84: Status 404 returned error can't find the container with id fbb0c542aa061d1bf2bc16f316f18873736518d6e2b25f252737a8dbd0f65b84 Dec 05 06:04:42 crc kubenswrapper[4652]: I1205 06:04:42.985289 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qgdwf"] Dec 05 06:04:43 crc kubenswrapper[4652]: W1205 06:04:43.005445 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b14b958_34f5_4254_8dc0_9666617dfd56.slice/crio-f761bb6fb9ab1430e89e143621a67c105674aabc3b79b187193347a330d95ef6 WatchSource:0}: Error finding container f761bb6fb9ab1430e89e143621a67c105674aabc3b79b187193347a330d95ef6: Status 404 returned error can't find the container with id f761bb6fb9ab1430e89e143621a67c105674aabc3b79b187193347a330d95ef6 Dec 05 06:04:43 crc kubenswrapper[4652]: I1205 06:04:43.610061 4652 generic.go:334] "Generic (PLEG): container finished" podID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerID="4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415" exitCode=0 Dec 05 06:04:43 crc kubenswrapper[4652]: I1205 06:04:43.610158 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nq84q" event={"ID":"e7a51abf-533c-4ed3-8506-7902eccf35d6","Type":"ContainerDied","Data":"4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415"} Dec 05 06:04:43 crc kubenswrapper[4652]: I1205 06:04:43.610210 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nq84q" event={"ID":"e7a51abf-533c-4ed3-8506-7902eccf35d6","Type":"ContainerStarted","Data":"fbb0c542aa061d1bf2bc16f316f18873736518d6e2b25f252737a8dbd0f65b84"} Dec 05 06:04:43 crc kubenswrapper[4652]: I1205 06:04:43.611747 4652 generic.go:334] "Generic (PLEG): container finished" podID="3b14b958-34f5-4254-8dc0-9666617dfd56" containerID="be6d548710c3253d867e97b879520232f82cd27c67e6cbee9b68d97882e0ba9a" exitCode=0 Dec 05 06:04:43 crc kubenswrapper[4652]: I1205 06:04:43.611776 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qgdwf" 
event={"ID":"3b14b958-34f5-4254-8dc0-9666617dfd56","Type":"ContainerDied","Data":"be6d548710c3253d867e97b879520232f82cd27c67e6cbee9b68d97882e0ba9a"} Dec 05 06:04:43 crc kubenswrapper[4652]: I1205 06:04:43.611809 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qgdwf" event={"ID":"3b14b958-34f5-4254-8dc0-9666617dfd56","Type":"ContainerStarted","Data":"f761bb6fb9ab1430e89e143621a67c105674aabc3b79b187193347a330d95ef6"} Dec 05 06:04:44 crc kubenswrapper[4652]: I1205 06:04:44.622881 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nq84q" event={"ID":"e7a51abf-533c-4ed3-8506-7902eccf35d6","Type":"ContainerStarted","Data":"8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050"} Dec 05 06:04:47 crc kubenswrapper[4652]: I1205 06:04:47.652794 4652 generic.go:334] "Generic (PLEG): container finished" podID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerID="8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050" exitCode=0 Dec 05 06:04:47 crc kubenswrapper[4652]: I1205 06:04:47.652864 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nq84q" event={"ID":"e7a51abf-533c-4ed3-8506-7902eccf35d6","Type":"ContainerDied","Data":"8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050"} Dec 05 06:04:47 crc kubenswrapper[4652]: I1205 06:04:47.655689 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qgdwf" event={"ID":"3b14b958-34f5-4254-8dc0-9666617dfd56","Type":"ContainerStarted","Data":"3ef3530d98ee613251a4383f1d6da2732b48abe3a70ea9b7351d7ef05012898a"} Dec 05 06:04:48 crc kubenswrapper[4652]: I1205 06:04:48.665430 4652 generic.go:334] "Generic (PLEG): container finished" podID="3b14b958-34f5-4254-8dc0-9666617dfd56" containerID="3ef3530d98ee613251a4383f1d6da2732b48abe3a70ea9b7351d7ef05012898a" exitCode=0 Dec 05 06:04:48 crc kubenswrapper[4652]: I1205 06:04:48.665509 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qgdwf" event={"ID":"3b14b958-34f5-4254-8dc0-9666617dfd56","Type":"ContainerDied","Data":"3ef3530d98ee613251a4383f1d6da2732b48abe3a70ea9b7351d7ef05012898a"} Dec 05 06:04:48 crc kubenswrapper[4652]: I1205 06:04:48.668626 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nq84q" event={"ID":"e7a51abf-533c-4ed3-8506-7902eccf35d6","Type":"ContainerStarted","Data":"8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af"} Dec 05 06:04:48 crc kubenswrapper[4652]: I1205 06:04:48.702941 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nq84q" podStartSLOduration=3.173583157 podStartE2EDuration="7.702923358s" podCreationTimestamp="2025-12-05 06:04:41 +0000 UTC" firstStartedPulling="2025-12-05 06:04:43.611719934 +0000 UTC m=+2285.848450201" lastFinishedPulling="2025-12-05 06:04:48.141060135 +0000 UTC m=+2290.377790402" observedRunningTime="2025-12-05 06:04:48.69613147 +0000 UTC m=+2290.932861738" watchObservedRunningTime="2025-12-05 06:04:48.702923358 +0000 UTC m=+2290.939653625" Dec 05 06:04:49 crc kubenswrapper[4652]: I1205 06:04:49.678601 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qgdwf" event={"ID":"3b14b958-34f5-4254-8dc0-9666617dfd56","Type":"ContainerStarted","Data":"21b7c17d83981491e06b4a682265d1298b25c0ca9d0c9e4c41f2dcf56806687d"} Dec 05 
06:04:49 crc kubenswrapper[4652]: I1205 06:04:49.693872 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qgdwf" podStartSLOduration=2.146554416 podStartE2EDuration="7.693853148s" podCreationTimestamp="2025-12-05 06:04:42 +0000 UTC" firstStartedPulling="2025-12-05 06:04:43.613670821 +0000 UTC m=+2285.850401078" lastFinishedPulling="2025-12-05 06:04:49.160969543 +0000 UTC m=+2291.397699810" observedRunningTime="2025-12-05 06:04:49.692902992 +0000 UTC m=+2291.929633260" watchObservedRunningTime="2025-12-05 06:04:49.693853148 +0000 UTC m=+2291.930583415" Dec 05 06:04:52 crc kubenswrapper[4652]: I1205 06:04:52.329142 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:52 crc kubenswrapper[4652]: I1205 06:04:52.329861 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:04:52 crc kubenswrapper[4652]: I1205 06:04:52.495399 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:52 crc kubenswrapper[4652]: I1205 06:04:52.495457 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:52 crc kubenswrapper[4652]: I1205 06:04:52.532719 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:04:53 crc kubenswrapper[4652]: I1205 06:04:53.362922 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nq84q" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="registry-server" probeResult="failure" output=< Dec 05 06:04:53 crc kubenswrapper[4652]: timeout: failed to connect service ":50051" within 1s Dec 05 06:04:53 crc kubenswrapper[4652]: > Dec 05 06:04:54 crc kubenswrapper[4652]: I1205 06:04:54.126612 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:04:54 crc kubenswrapper[4652]: E1205 06:04:54.126977 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:05:02 crc kubenswrapper[4652]: I1205 06:05:02.366048 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:05:02 crc kubenswrapper[4652]: I1205 06:05:02.399843 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:05:02 crc kubenswrapper[4652]: I1205 06:05:02.530241 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qgdwf" Dec 05 06:05:02 crc kubenswrapper[4652]: I1205 06:05:02.599816 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nq84q"] Dec 05 06:05:03 crc kubenswrapper[4652]: I1205 06:05:03.813192 4652 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-nq84q" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="registry-server" containerID="cri-o://8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af" gracePeriod=2 Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.198320 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.401722 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77lpt\" (UniqueName: \"kubernetes.io/projected/e7a51abf-533c-4ed3-8506-7902eccf35d6-kube-api-access-77lpt\") pod \"e7a51abf-533c-4ed3-8506-7902eccf35d6\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.401953 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-utilities\") pod \"e7a51abf-533c-4ed3-8506-7902eccf35d6\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.402080 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-catalog-content\") pod \"e7a51abf-533c-4ed3-8506-7902eccf35d6\" (UID: \"e7a51abf-533c-4ed3-8506-7902eccf35d6\") " Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.402572 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-utilities" (OuterVolumeSpecName: "utilities") pod "e7a51abf-533c-4ed3-8506-7902eccf35d6" (UID: "e7a51abf-533c-4ed3-8506-7902eccf35d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.402807 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.406887 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7a51abf-533c-4ed3-8506-7902eccf35d6-kube-api-access-77lpt" (OuterVolumeSpecName: "kube-api-access-77lpt") pod "e7a51abf-533c-4ed3-8506-7902eccf35d6" (UID: "e7a51abf-533c-4ed3-8506-7902eccf35d6"). InnerVolumeSpecName "kube-api-access-77lpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.420350 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qgdwf"] Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.478485 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7a51abf-533c-4ed3-8506-7902eccf35d6" (UID: "e7a51abf-533c-4ed3-8506-7902eccf35d6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.505406 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a51abf-533c-4ed3-8506-7902eccf35d6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.505435 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77lpt\" (UniqueName: \"kubernetes.io/projected/e7a51abf-533c-4ed3-8506-7902eccf35d6-kube-api-access-77lpt\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.801280 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-skn8m"] Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.801576 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-skn8m" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerName="registry-server" containerID="cri-o://0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a" gracePeriod=2 Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.822926 4652 generic.go:334] "Generic (PLEG): container finished" podID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerID="8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af" exitCode=0 Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.822982 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nq84q" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.823001 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nq84q" event={"ID":"e7a51abf-533c-4ed3-8506-7902eccf35d6","Type":"ContainerDied","Data":"8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af"} Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.823637 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nq84q" event={"ID":"e7a51abf-533c-4ed3-8506-7902eccf35d6","Type":"ContainerDied","Data":"fbb0c542aa061d1bf2bc16f316f18873736518d6e2b25f252737a8dbd0f65b84"} Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.823669 4652 scope.go:117] "RemoveContainer" containerID="8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.841824 4652 scope.go:117] "RemoveContainer" containerID="8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050" Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.931637 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nq84q"] Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.944769 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nq84q"] Dec 05 06:05:04 crc kubenswrapper[4652]: I1205 06:05:04.960661 4652 scope.go:117] "RemoveContainer" containerID="4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.040861 4652 scope.go:117] "RemoveContainer" containerID="8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af" Dec 05 06:05:05 crc kubenswrapper[4652]: E1205 06:05:05.041312 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af\": 
container with ID starting with 8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af not found: ID does not exist" containerID="8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.041355 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af"} err="failed to get container status \"8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af\": rpc error: code = NotFound desc = could not find container \"8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af\": container with ID starting with 8737ca6a520aaa36fe05090759850ee073cbb8192b89adf2d62f8276fdeba2af not found: ID does not exist" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.041384 4652 scope.go:117] "RemoveContainer" containerID="8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050" Dec 05 06:05:05 crc kubenswrapper[4652]: E1205 06:05:05.041677 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050\": container with ID starting with 8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050 not found: ID does not exist" containerID="8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.041712 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050"} err="failed to get container status \"8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050\": rpc error: code = NotFound desc = could not find container \"8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050\": container with ID starting with 8a42c04e2ace7285f0516b1294627f2a7eadaf48b749dc4c534ce85cb5b2d050 not found: ID does not exist" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.041751 4652 scope.go:117] "RemoveContainer" containerID="4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415" Dec 05 06:05:05 crc kubenswrapper[4652]: E1205 06:05:05.042215 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415\": container with ID starting with 4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415 not found: ID does not exist" containerID="4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.042256 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415"} err="failed to get container status \"4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415\": rpc error: code = NotFound desc = could not find container \"4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415\": container with ID starting with 4a80d3114eed811978a02d473276f3adbd29d24bae0d8d8e377342e629162415 not found: ID does not exist" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.344942 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-skn8m" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.422936 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwzzj\" (UniqueName: \"kubernetes.io/projected/f27dfcad-0744-4e4b-afd8-602951a7e2a7-kube-api-access-qwzzj\") pod \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.423087 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-utilities\") pod \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.423176 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-catalog-content\") pod \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\" (UID: \"f27dfcad-0744-4e4b-afd8-602951a7e2a7\") " Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.424768 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-utilities" (OuterVolumeSpecName: "utilities") pod "f27dfcad-0744-4e4b-afd8-602951a7e2a7" (UID: "f27dfcad-0744-4e4b-afd8-602951a7e2a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.429549 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f27dfcad-0744-4e4b-afd8-602951a7e2a7-kube-api-access-qwzzj" (OuterVolumeSpecName: "kube-api-access-qwzzj") pod "f27dfcad-0744-4e4b-afd8-602951a7e2a7" (UID: "f27dfcad-0744-4e4b-afd8-602951a7e2a7"). InnerVolumeSpecName "kube-api-access-qwzzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.477885 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f27dfcad-0744-4e4b-afd8-602951a7e2a7" (UID: "f27dfcad-0744-4e4b-afd8-602951a7e2a7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.526020 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.526120 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwzzj\" (UniqueName: \"kubernetes.io/projected/f27dfcad-0744-4e4b-afd8-602951a7e2a7-kube-api-access-qwzzj\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.526175 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f27dfcad-0744-4e4b-afd8-602951a7e2a7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.835045 4652 generic.go:334] "Generic (PLEG): container finished" podID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerID="0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a" exitCode=0 Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.835103 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skn8m" event={"ID":"f27dfcad-0744-4e4b-afd8-602951a7e2a7","Type":"ContainerDied","Data":"0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a"} Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.835112 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-skn8m" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.835138 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-skn8m" event={"ID":"f27dfcad-0744-4e4b-afd8-602951a7e2a7","Type":"ContainerDied","Data":"ccfec55b3f09d8af36e230b82fb3ca58bc7fe354f594bb3c74c3a26fba689f6a"} Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.835159 4652 scope.go:117] "RemoveContainer" containerID="0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.863133 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-skn8m"] Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.867107 4652 scope.go:117] "RemoveContainer" containerID="935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.870684 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-skn8m"] Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.887413 4652 scope.go:117] "RemoveContainer" containerID="1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.902951 4652 scope.go:117] "RemoveContainer" containerID="0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a" Dec 05 06:05:05 crc kubenswrapper[4652]: E1205 06:05:05.903274 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a\": container with ID starting with 0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a not found: ID does not exist" containerID="0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.903372 
4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a"} err="failed to get container status \"0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a\": rpc error: code = NotFound desc = could not find container \"0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a\": container with ID starting with 0b6a293dc31627337896f7143f57abe6ae127417807f6cbae4f92a4704830d1a not found: ID does not exist" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.903449 4652 scope.go:117] "RemoveContainer" containerID="935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb" Dec 05 06:05:05 crc kubenswrapper[4652]: E1205 06:05:05.903931 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb\": container with ID starting with 935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb not found: ID does not exist" containerID="935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.903966 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb"} err="failed to get container status \"935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb\": rpc error: code = NotFound desc = could not find container \"935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb\": container with ID starting with 935a67cddec53604bc567dc393fbff3c86419641b5215818743e2b95a03db1fb not found: ID does not exist" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.903986 4652 scope.go:117] "RemoveContainer" containerID="1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a" Dec 05 06:05:05 crc kubenswrapper[4652]: E1205 06:05:05.904264 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a\": container with ID starting with 1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a not found: ID does not exist" containerID="1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a" Dec 05 06:05:05 crc kubenswrapper[4652]: I1205 06:05:05.904298 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a"} err="failed to get container status \"1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a\": rpc error: code = NotFound desc = could not find container \"1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a\": container with ID starting with 1e40e0041e1d1636a4f179a7d122f712496bd83e8b7752945227019e3351d32a not found: ID does not exist" Dec 05 06:05:06 crc kubenswrapper[4652]: I1205 06:05:06.136804 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" path="/var/lib/kubelet/pods/e7a51abf-533c-4ed3-8506-7902eccf35d6/volumes" Dec 05 06:05:06 crc kubenswrapper[4652]: I1205 06:05:06.137447 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" path="/var/lib/kubelet/pods/f27dfcad-0744-4e4b-afd8-602951a7e2a7/volumes" Dec 05 06:05:09 crc kubenswrapper[4652]: I1205 
06:05:09.126702 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:05:09 crc kubenswrapper[4652]: E1205 06:05:09.129669 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:05:23 crc kubenswrapper[4652]: I1205 06:05:23.126011 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:05:23 crc kubenswrapper[4652]: E1205 06:05:23.127301 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:05:38 crc kubenswrapper[4652]: I1205 06:05:38.132864 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:05:38 crc kubenswrapper[4652]: E1205 06:05:38.133784 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:05:49 crc kubenswrapper[4652]: I1205 06:05:49.125441 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:05:49 crc kubenswrapper[4652]: E1205 06:05:49.126414 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:06:01 crc kubenswrapper[4652]: I1205 06:06:01.126140 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:06:01 crc kubenswrapper[4652]: E1205 06:06:01.126991 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:06:10 crc kubenswrapper[4652]: I1205 06:06:10.408587 4652 generic.go:334] "Generic (PLEG): container finished" podID="1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" 
containerID="4bff42a183e1bd851934308be4669dbc30bee9040d08331680c87075693cbf1c" exitCode=0 Dec 05 06:06:10 crc kubenswrapper[4652]: I1205 06:06:10.408661 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" event={"ID":"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c","Type":"ContainerDied","Data":"4bff42a183e1bd851934308be4669dbc30bee9040d08331680c87075693cbf1c"} Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.747118 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.759719 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-telemetry-combined-ca-bundle\") pod \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.759842 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-1\") pod \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.767905 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" (UID: "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.786602 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" (UID: "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.861004 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-inventory\") pod \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.861093 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-284mg\" (UniqueName: \"kubernetes.io/projected/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-kube-api-access-284mg\") pod \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.861143 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-0\") pod \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.861163 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ssh-key\") pod \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.861255 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-2\") pod \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\" (UID: \"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c\") " Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.862051 4652 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.862069 4652 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.864369 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-kube-api-access-284mg" (OuterVolumeSpecName: "kube-api-access-284mg") pod "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" (UID: "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c"). InnerVolumeSpecName "kube-api-access-284mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.883754 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" (UID: "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.884193 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-inventory" (OuterVolumeSpecName: "inventory") pod "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" (UID: "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.884918 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" (UID: "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.888665 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" (UID: "1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.966015 4652 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.966345 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.966364 4652 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.966379 4652 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:11 crc kubenswrapper[4652]: I1205 06:06:11.966395 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-284mg\" (UniqueName: \"kubernetes.io/projected/1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c-kube-api-access-284mg\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:12 crc kubenswrapper[4652]: I1205 06:06:12.126952 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:06:12 crc kubenswrapper[4652]: E1205 06:06:12.127442 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:06:12 crc kubenswrapper[4652]: I1205 06:06:12.429615 4652 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" event={"ID":"1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c","Type":"ContainerDied","Data":"f12e76b3a1758c520b5ee4f52702dbf4264d3ad14d93bc2eb0d629caf5988442"} Dec 05 06:06:12 crc kubenswrapper[4652]: I1205 06:06:12.429674 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f12e76b3a1758c520b5ee4f52702dbf4264d3ad14d93bc2eb0d629caf5988442" Dec 05 06:06:12 crc kubenswrapper[4652]: I1205 06:06:12.429674 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc" Dec 05 06:06:24 crc kubenswrapper[4652]: I1205 06:06:24.127118 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:06:24 crc kubenswrapper[4652]: E1205 06:06:24.128396 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:06:35 crc kubenswrapper[4652]: I1205 06:06:35.125961 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:06:35 crc kubenswrapper[4652]: E1205 06:06:35.126841 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.464109 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.464988 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="thanos-sidecar" containerID="cri-o://da54de35e0f9f9c2fd78f2aa8e0e8ea246aa354058e1bcc0769b060a37048646" gracePeriod=600 Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.464993 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="config-reloader" containerID="cri-o://7060044012efa9e072cd50a01291408aa3144ecad2fb0032419f049bcb1aa44a" gracePeriod=600 Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.464887 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="prometheus" containerID="cri-o://42c8e400642241f60709f60f577a3079ef4abcfdf09fb72e32dbb3be5eecfe5c" gracePeriod=600 Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.704499 4652 generic.go:334] "Generic (PLEG): container finished" podID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerID="da54de35e0f9f9c2fd78f2aa8e0e8ea246aa354058e1bcc0769b060a37048646" exitCode=0 Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 
06:06:45.704631 4652 generic.go:334] "Generic (PLEG): container finished" podID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerID="7060044012efa9e072cd50a01291408aa3144ecad2fb0032419f049bcb1aa44a" exitCode=0 Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.704642 4652 generic.go:334] "Generic (PLEG): container finished" podID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerID="42c8e400642241f60709f60f577a3079ef4abcfdf09fb72e32dbb3be5eecfe5c" exitCode=0 Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.704666 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerDied","Data":"da54de35e0f9f9c2fd78f2aa8e0e8ea246aa354058e1bcc0769b060a37048646"} Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.704719 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerDied","Data":"7060044012efa9e072cd50a01291408aa3144ecad2fb0032419f049bcb1aa44a"} Dec 05 06:06:45 crc kubenswrapper[4652]: I1205 06:06:45.704733 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerDied","Data":"42c8e400642241f60709f60f577a3079ef4abcfdf09fb72e32dbb3be5eecfe5c"} Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.006751 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.126192 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.126423 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.171959 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.172059 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.172090 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-tls-assets\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.172171 4652 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2bbbfb38-753e-463f-821f-1a98b2d68d38-config-out\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.172197 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2bbbfb38-753e-463f-821f-1a98b2d68d38-prometheus-metric-storage-rulefiles-0\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.172239 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-thanos-prometheus-http-client-file\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.172276 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbqbs\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-kube-api-access-wbqbs\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.172385 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.172424 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.173309 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-secret-combined-ca-bundle\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.173302 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bbbfb38-753e-463f-821f-1a98b2d68d38-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.173351 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-config\") pod \"2bbbfb38-753e-463f-821f-1a98b2d68d38\" (UID: \"2bbbfb38-753e-463f-821f-1a98b2d68d38\") " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.173850 4652 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/2bbbfb38-753e-463f-821f-1a98b2d68d38-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.178102 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.178370 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-kube-api-access-wbqbs" (OuterVolumeSpecName: "kube-api-access-wbqbs") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "kube-api-access-wbqbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.178830 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.178955 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.179626 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bbbfb38-753e-463f-821f-1a98b2d68d38-config-out" (OuterVolumeSpecName: "config-out") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.180899 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.181149 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-config" (OuterVolumeSpecName: "config") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.181404 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.191678 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.239328 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config" (OuterVolumeSpecName: "web-config") pod "2bbbfb38-753e-463f-821f-1a98b2d68d38" (UID: "2bbbfb38-753e-463f-821f-1a98b2d68d38"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276824 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") on node \"crc\" " Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276865 4652 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276894 4652 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276905 4652 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2bbbfb38-753e-463f-821f-1a98b2d68d38-config-out\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276915 4652 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276924 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbqbs\" (UniqueName: \"kubernetes.io/projected/2bbbfb38-753e-463f-821f-1a98b2d68d38-kube-api-access-wbqbs\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276935 4652 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276943 4652 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276987 4652 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.276995 4652 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2bbbfb38-753e-463f-821f-1a98b2d68d38-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.297083 4652 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.297218 4652 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc") on node "crc" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.379268 4652 reconciler_common.go:293] "Volume detached for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.714907 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"2bbbfb38-753e-463f-821f-1a98b2d68d38","Type":"ContainerDied","Data":"74afa5d80471d42342d2a11ce1fbb3c2fe1f7b6d02cd36b854b73564fd611c00"} Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.714971 4652 scope.go:117] "RemoveContainer" containerID="da54de35e0f9f9c2fd78f2aa8e0e8ea246aa354058e1bcc0769b060a37048646" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.714973 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.750884 4652 scope.go:117] "RemoveContainer" containerID="7060044012efa9e072cd50a01291408aa3144ecad2fb0032419f049bcb1aa44a" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.775963 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.779295 4652 scope.go:117] "RemoveContainer" containerID="42c8e400642241f60709f60f577a3079ef4abcfdf09fb72e32dbb3be5eecfe5c" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.784354 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.792735 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793160 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="extract-utilities" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793178 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="extract-utilities" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793188 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="config-reloader" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793194 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="config-reloader" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793210 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerName="extract-utilities" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793216 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerName="extract-utilities" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793231 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerName="extract-content" Dec 05 06:06:46 crc 
kubenswrapper[4652]: I1205 06:06:46.793237 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerName="extract-content" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793249 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793255 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793269 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="thanos-sidecar" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793274 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="thanos-sidecar" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793286 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="extract-content" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793292 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="extract-content" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793304 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="init-config-reloader" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793310 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="init-config-reloader" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793317 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="registry-server" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793322 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="registry-server" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793336 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerName="registry-server" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793341 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerName="registry-server" Dec 05 06:06:46 crc kubenswrapper[4652]: E1205 06:06:46.793350 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="prometheus" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793355 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="prometheus" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793590 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="config-reloader" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793600 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7a51abf-533c-4ed3-8506-7902eccf35d6" containerName="registry-server" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793608 4652 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="prometheus" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793617 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793626 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f27dfcad-0744-4e4b-afd8-602951a7e2a7" containerName="registry-server" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.793640 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" containerName="thanos-sidecar" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.795427 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.800212 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.805761 4652 scope.go:117] "RemoveContainer" containerID="59e3b1f810ecb3612661b9ec5e1805957f6eebe1133c994ef57c7c832a10c0e7" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.806019 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.806235 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.806019 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.806506 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-bj7vd" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.806784 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.808913 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.988720 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989016 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nspc4\" (UniqueName: \"kubernetes.io/projected/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-kube-api-access-nspc4\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989076 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-config\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " 
pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989095 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989116 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989166 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989256 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989446 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989477 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989501 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:46 crc kubenswrapper[4652]: I1205 06:06:46.989673 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " 
pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092211 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092275 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092307 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092375 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092415 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092469 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nspc4\" (UniqueName: \"kubernetes.io/projected/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-kube-api-access-nspc4\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092541 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-config\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092584 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.092612 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: 
\"kubernetes.io/configmap/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.093014 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.093073 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.093372 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.097103 4652 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.097169 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0495d50add23caafd61e72bca8d5e7274e8f1a3737d1b608160f79f869a86c50/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.100468 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.101162 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-config\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.101951 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.103258 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.105486 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.107245 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.107353 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.107531 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.109866 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nspc4\" (UniqueName: \"kubernetes.io/projected/64c75413-ccc5-4f2b-8ce7-e3891c408fc8-kube-api-access-nspc4\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.146815 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e338bc63-c07e-46ff-8c01-ed6e610f12dc\") pod \"prometheus-metric-storage-0\" (UID: \"64c75413-ccc5-4f2b-8ce7-e3891c408fc8\") " pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.157732 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.559536 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 06:06:47 crc kubenswrapper[4652]: I1205 06:06:47.724756 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"64c75413-ccc5-4f2b-8ce7-e3891c408fc8","Type":"ContainerStarted","Data":"1fe96aed140560b7555fa3e8f0324f081100837331fb034ca628537e2e130795"} Dec 05 06:06:48 crc kubenswrapper[4652]: I1205 06:06:48.135539 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bbbfb38-753e-463f-821f-1a98b2d68d38" path="/var/lib/kubelet/pods/2bbbfb38-753e-463f-821f-1a98b2d68d38/volumes" Dec 05 06:06:50 crc kubenswrapper[4652]: I1205 06:06:50.750976 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"64c75413-ccc5-4f2b-8ce7-e3891c408fc8","Type":"ContainerStarted","Data":"af50661fcbd521b8d9927673d2e7518a437a0ba9f8586eee265856b801438b77"} Dec 05 06:06:55 crc kubenswrapper[4652]: I1205 06:06:55.796482 4652 generic.go:334] "Generic (PLEG): container finished" podID="64c75413-ccc5-4f2b-8ce7-e3891c408fc8" containerID="af50661fcbd521b8d9927673d2e7518a437a0ba9f8586eee265856b801438b77" exitCode=0 Dec 05 06:06:55 crc kubenswrapper[4652]: I1205 06:06:55.796608 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"64c75413-ccc5-4f2b-8ce7-e3891c408fc8","Type":"ContainerDied","Data":"af50661fcbd521b8d9927673d2e7518a437a0ba9f8586eee265856b801438b77"} Dec 05 06:06:56 crc kubenswrapper[4652]: I1205 06:06:56.806189 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"64c75413-ccc5-4f2b-8ce7-e3891c408fc8","Type":"ContainerStarted","Data":"c9962d6a61944e735ee67e321b9cd8cb0aeda667e2f81dc153c9fc25cb67639e"} Dec 05 06:06:57 crc kubenswrapper[4652]: I1205 06:06:57.126176 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:06:57 crc kubenswrapper[4652]: E1205 06:06:57.126382 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:06:58 crc kubenswrapper[4652]: I1205 06:06:58.834174 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"64c75413-ccc5-4f2b-8ce7-e3891c408fc8","Type":"ContainerStarted","Data":"43a8c50391cb4f0ecb2fbd4fbd3988962aabbee4b16c85ada66a3ed1055d6fe9"} Dec 05 06:06:58 crc kubenswrapper[4652]: I1205 06:06:58.834534 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"64c75413-ccc5-4f2b-8ce7-e3891c408fc8","Type":"ContainerStarted","Data":"bc8145d363441a54e537700c5b5967b4173fa61ff7f09ddf20af5720d03872f5"} Dec 05 06:06:58 crc kubenswrapper[4652]: I1205 06:06:58.858931 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=12.858909375 podStartE2EDuration="12.858909375s" 
podCreationTimestamp="2025-12-05 06:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:06:58.852906107 +0000 UTC m=+2421.089636374" watchObservedRunningTime="2025-12-05 06:06:58.858909375 +0000 UTC m=+2421.095639642" Dec 05 06:07:02 crc kubenswrapper[4652]: I1205 06:07:02.158724 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 06:07:02 crc kubenswrapper[4652]: I1205 06:07:02.159101 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 06:07:02 crc kubenswrapper[4652]: I1205 06:07:02.164452 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 06:07:02 crc kubenswrapper[4652]: I1205 06:07:02.888333 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 06:07:11 crc kubenswrapper[4652]: I1205 06:07:11.126262 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:07:11 crc kubenswrapper[4652]: E1205 06:07:11.127042 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.559258 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.561758 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.565350 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.565637 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-694t4" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.565685 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.566007 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.569266 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.683919 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5vzk\" (UniqueName: \"kubernetes.io/projected/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-kube-api-access-t5vzk\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.683966 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.684089 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.684132 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.684156 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.684199 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.684225 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-config-data\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.684258 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.684280 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785665 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785716 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785740 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785778 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785800 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-config-data\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785835 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785861 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ssh-key\") pod \"tempest-tests-tempest\" 
(UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785885 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5vzk\" (UniqueName: \"kubernetes.io/projected/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-kube-api-access-t5vzk\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.785906 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.786300 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.786924 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.786975 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.787186 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.787274 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-config-data\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.792879 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.793379 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.793943 4652 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.801725 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5vzk\" (UniqueName: \"kubernetes.io/projected/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-kube-api-access-t5vzk\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.809027 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " pod="openstack/tempest-tests-tempest" Dec 05 06:07:23 crc kubenswrapper[4652]: I1205 06:07:23.881835 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 06:07:24 crc kubenswrapper[4652]: I1205 06:07:24.280200 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 06:07:25 crc kubenswrapper[4652]: I1205 06:07:25.076466 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2","Type":"ContainerStarted","Data":"9ca1a1c550d0f041facc82dd0375c36e185e9be696bdd0ffd6a1f95db9ec9b51"} Dec 05 06:07:26 crc kubenswrapper[4652]: I1205 06:07:26.129443 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:07:26 crc kubenswrapper[4652]: E1205 06:07:26.129960 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:07:39 crc kubenswrapper[4652]: I1205 06:07:39.126006 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:07:39 crc kubenswrapper[4652]: I1205 06:07:39.158331 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 06:07:40 crc kubenswrapper[4652]: I1205 06:07:40.227808 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"531d8012781456fa01e04f9a09992b021b6f932bc6740308e1f407d31ef1ab80"} Dec 05 06:07:40 crc kubenswrapper[4652]: I1205 06:07:40.229653 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2","Type":"ContainerStarted","Data":"4ee485b6b6516b231f15bffb971b75f7f347fddbccd4e67ec4b93ef1f6a43dbb"} Dec 05 06:10:04 crc kubenswrapper[4652]: I1205 06:10:04.150830 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 06:10:04 crc kubenswrapper[4652]: I1205 06:10:04.151244 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:10:34 crc kubenswrapper[4652]: I1205 06:10:34.150679 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 06:10:34 crc kubenswrapper[4652]: I1205 06:10:34.151109 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:11:04 crc kubenswrapper[4652]: I1205 06:11:04.150108 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 06:11:04 crc kubenswrapper[4652]: I1205 06:11:04.150706 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:11:04 crc kubenswrapper[4652]: I1205 06:11:04.150766 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24"
Dec 05 06:11:04 crc kubenswrapper[4652]: I1205 06:11:04.151731 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"531d8012781456fa01e04f9a09992b021b6f932bc6740308e1f407d31ef1ab80"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 06:11:04 crc kubenswrapper[4652]: I1205 06:11:04.151803 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://531d8012781456fa01e04f9a09992b021b6f932bc6740308e1f407d31ef1ab80" gracePeriod=600
Dec 05 06:11:04 crc kubenswrapper[4652]: I1205 06:11:04.979844 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="531d8012781456fa01e04f9a09992b021b6f932bc6740308e1f407d31ef1ab80" exitCode=0
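[annotation] Each patch_prober/prober pair above is one failed liveness probe: an HTTP GET against http://127.0.0.1:8798/health that is refused because nothing is listening. After the failure threshold, the kubelet kills the container with a grace period (gracePeriod=600 here, presumably the pod's terminationGracePeriodSeconds) and restarts it. The check itself amounts to the following hedged sketch; the kubelet's real prober additionally applies the probe spec's timeout, thresholds, and headers:

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce mimics the gist of an HTTP liveness probe: a 2xx/3xx response is
// success; anything else, including connection refused as in the log, fails.
func probeOnce(url string) error {
	client := &http.Client{Timeout: time.Second} // stand-in for timeoutSeconds
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeOnce("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}
```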
event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"531d8012781456fa01e04f9a09992b021b6f932bc6740308e1f407d31ef1ab80"} Dec 05 06:11:04 crc kubenswrapper[4652]: I1205 06:11:04.980389 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79"} Dec 05 06:11:04 crc kubenswrapper[4652]: I1205 06:11:04.980406 4652 scope.go:117] "RemoveContainer" containerID="79bf139690224ef0e4458f31b0d4e0e77ebe306d02bc55aa9e9ece7a617f9454" Dec 05 06:11:05 crc kubenswrapper[4652]: I1205 06:11:05.023881 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=208.156248679 podStartE2EDuration="3m43.023859203s" podCreationTimestamp="2025-12-05 06:07:22 +0000 UTC" firstStartedPulling="2025-12-05 06:07:24.286616174 +0000 UTC m=+2446.523346441" lastFinishedPulling="2025-12-05 06:07:39.154226698 +0000 UTC m=+2461.390956965" observedRunningTime="2025-12-05 06:07:40.26684547 +0000 UTC m=+2462.503575737" watchObservedRunningTime="2025-12-05 06:11:05.023859203 +0000 UTC m=+2667.260589470" Dec 05 06:13:04 crc kubenswrapper[4652]: I1205 06:13:04.151046 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:13:04 crc kubenswrapper[4652]: I1205 06:13:04.151592 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:13:34 crc kubenswrapper[4652]: I1205 06:13:34.150287 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:13:34 crc kubenswrapper[4652]: I1205 06:13:34.150695 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:13:37 crc kubenswrapper[4652]: E1205 06:13:37.024223 4652 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.93:44402->192.168.25.93:37713: write tcp 192.168.25.93:44402->192.168.25.93:37713: write: broken pipe Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.150232 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.151576 4652 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.151633 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.152742 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.152807 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" gracePeriod=600 Dec 05 06:14:04 crc kubenswrapper[4652]: E1205 06:14:04.268453 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.542469 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" exitCode=0 Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.542521 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79"} Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.542594 4652 scope.go:117] "RemoveContainer" containerID="531d8012781456fa01e04f9a09992b021b6f932bc6740308e1f407d31ef1ab80" Dec 05 06:14:04 crc kubenswrapper[4652]: I1205 06:14:04.544214 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:14:04 crc kubenswrapper[4652]: E1205 06:14:04.544701 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:14:15 crc kubenswrapper[4652]: I1205 06:14:15.125986 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:14:15 crc kubenswrapper[4652]: E1205 06:14:15.126740 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:14:29 crc kubenswrapper[4652]: I1205 06:14:29.126120 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:14:29 crc kubenswrapper[4652]: E1205 06:14:29.126931 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:14:40 crc kubenswrapper[4652]: I1205 06:14:40.126194 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:14:40 crc kubenswrapper[4652]: E1205 06:14:40.127064 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:14:52 crc kubenswrapper[4652]: I1205 06:14:52.126195 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:14:52 crc kubenswrapper[4652]: E1205 06:14:52.126945 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.145103 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr"] Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.147061 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.149481 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.155471 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.155706 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr"] Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.215408 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9xkc\" (UniqueName: \"kubernetes.io/projected/203fac7a-698a-4fbe-9bce-f6e4704073a3-kube-api-access-m9xkc\") pod \"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.215726 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/203fac7a-698a-4fbe-9bce-f6e4704073a3-secret-volume\") pod \"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.215879 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/203fac7a-698a-4fbe-9bce-f6e4704073a3-config-volume\") pod \"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.318239 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/203fac7a-698a-4fbe-9bce-f6e4704073a3-secret-volume\") pod \"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.318638 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/203fac7a-698a-4fbe-9bce-f6e4704073a3-config-volume\") pod \"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.318966 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9xkc\" (UniqueName: \"kubernetes.io/projected/203fac7a-698a-4fbe-9bce-f6e4704073a3-kube-api-access-m9xkc\") pod \"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.319456 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/203fac7a-698a-4fbe-9bce-f6e4704073a3-config-volume\") pod 
\"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.324130 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/203fac7a-698a-4fbe-9bce-f6e4704073a3-secret-volume\") pod \"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.333852 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9xkc\" (UniqueName: \"kubernetes.io/projected/203fac7a-698a-4fbe-9bce-f6e4704073a3-kube-api-access-m9xkc\") pod \"collect-profiles-29415255-p9ppr\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.463631 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:00 crc kubenswrapper[4652]: I1205 06:15:00.882398 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr"] Dec 05 06:15:01 crc kubenswrapper[4652]: I1205 06:15:01.062503 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" event={"ID":"203fac7a-698a-4fbe-9bce-f6e4704073a3","Type":"ContainerStarted","Data":"25299719773a6ea115e247c0a6538edaff3368e180b4c6a3f3eb9e60f36786a4"} Dec 05 06:15:01 crc kubenswrapper[4652]: I1205 06:15:01.063057 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" event={"ID":"203fac7a-698a-4fbe-9bce-f6e4704073a3","Type":"ContainerStarted","Data":"597a5de4f78bc0977f7a46c9f37ae7951f038a9cf244fecdd8c13e1932cd96b5"} Dec 05 06:15:02 crc kubenswrapper[4652]: I1205 06:15:02.074075 4652 generic.go:334] "Generic (PLEG): container finished" podID="203fac7a-698a-4fbe-9bce-f6e4704073a3" containerID="25299719773a6ea115e247c0a6538edaff3368e180b4c6a3f3eb9e60f36786a4" exitCode=0 Dec 05 06:15:02 crc kubenswrapper[4652]: I1205 06:15:02.074192 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" event={"ID":"203fac7a-698a-4fbe-9bce-f6e4704073a3","Type":"ContainerDied","Data":"25299719773a6ea115e247c0a6538edaff3368e180b4c6a3f3eb9e60f36786a4"} Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.127127 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:15:03 crc kubenswrapper[4652]: E1205 06:15:03.127679 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.361646 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.396567 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/203fac7a-698a-4fbe-9bce-f6e4704073a3-config-volume\") pod \"203fac7a-698a-4fbe-9bce-f6e4704073a3\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.396628 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/203fac7a-698a-4fbe-9bce-f6e4704073a3-secret-volume\") pod \"203fac7a-698a-4fbe-9bce-f6e4704073a3\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.396941 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9xkc\" (UniqueName: \"kubernetes.io/projected/203fac7a-698a-4fbe-9bce-f6e4704073a3-kube-api-access-m9xkc\") pod \"203fac7a-698a-4fbe-9bce-f6e4704073a3\" (UID: \"203fac7a-698a-4fbe-9bce-f6e4704073a3\") " Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.397239 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/203fac7a-698a-4fbe-9bce-f6e4704073a3-config-volume" (OuterVolumeSpecName: "config-volume") pod "203fac7a-698a-4fbe-9bce-f6e4704073a3" (UID: "203fac7a-698a-4fbe-9bce-f6e4704073a3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.398907 4652 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/203fac7a-698a-4fbe-9bce-f6e4704073a3-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.403829 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/203fac7a-698a-4fbe-9bce-f6e4704073a3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "203fac7a-698a-4fbe-9bce-f6e4704073a3" (UID: "203fac7a-698a-4fbe-9bce-f6e4704073a3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.403995 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/203fac7a-698a-4fbe-9bce-f6e4704073a3-kube-api-access-m9xkc" (OuterVolumeSpecName: "kube-api-access-m9xkc") pod "203fac7a-698a-4fbe-9bce-f6e4704073a3" (UID: "203fac7a-698a-4fbe-9bce-f6e4704073a3"). InnerVolumeSpecName "kube-api-access-m9xkc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.501258 4652 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/203fac7a-698a-4fbe-9bce-f6e4704073a3-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:03 crc kubenswrapper[4652]: I1205 06:15:03.501296 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9xkc\" (UniqueName: \"kubernetes.io/projected/203fac7a-698a-4fbe-9bce-f6e4704073a3-kube-api-access-m9xkc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:04 crc kubenswrapper[4652]: I1205 06:15:04.093495 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" event={"ID":"203fac7a-698a-4fbe-9bce-f6e4704073a3","Type":"ContainerDied","Data":"597a5de4f78bc0977f7a46c9f37ae7951f038a9cf244fecdd8c13e1932cd96b5"} Dec 05 06:15:04 crc kubenswrapper[4652]: I1205 06:15:04.094181 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="597a5de4f78bc0977f7a46c9f37ae7951f038a9cf244fecdd8c13e1932cd96b5" Dec 05 06:15:04 crc kubenswrapper[4652]: I1205 06:15:04.093568 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-p9ppr" Dec 05 06:15:04 crc kubenswrapper[4652]: I1205 06:15:04.426373 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"] Dec 05 06:15:04 crc kubenswrapper[4652]: I1205 06:15:04.434877 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415210-gp9m7"] Dec 05 06:15:06 crc kubenswrapper[4652]: I1205 06:15:06.134996 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffce5434-745e-4ed0-ad5f-b20f9ca06950" path="/var/lib/kubelet/pods/ffce5434-745e-4ed0-ad5f-b20f9ca06950/volumes" Dec 05 06:15:18 crc kubenswrapper[4652]: I1205 06:15:18.131420 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:15:18 crc kubenswrapper[4652]: E1205 06:15:18.131968 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:15:30 crc kubenswrapper[4652]: I1205 06:15:30.125662 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:15:30 crc kubenswrapper[4652]: E1205 06:15:30.126448 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:15:41 crc kubenswrapper[4652]: I1205 06:15:41.560017 4652 scope.go:117] "RemoveContainer" containerID="d58dc165028e979f5b805b50bf54589625de1daa7c4b46aca75c9a5b58d102cc" Dec 05 
06:15:42 crc kubenswrapper[4652]: I1205 06:15:42.126369 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:15:42 crc kubenswrapper[4652]: E1205 06:15:42.127012 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:15:54 crc kubenswrapper[4652]: I1205 06:15:54.126189 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:15:54 crc kubenswrapper[4652]: E1205 06:15:54.126968 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:16:08 crc kubenswrapper[4652]: I1205 06:16:08.131113 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:16:08 crc kubenswrapper[4652]: E1205 06:16:08.131756 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:16:23 crc kubenswrapper[4652]: I1205 06:16:23.125819 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:16:23 crc kubenswrapper[4652]: E1205 06:16:23.126404 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:16:34 crc kubenswrapper[4652]: I1205 06:16:34.125653 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:16:34 crc kubenswrapper[4652]: E1205 06:16:34.126307 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:16:46 crc kubenswrapper[4652]: I1205 06:16:46.125842 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:16:46 crc 
Dec 05 06:16:55 crc kubenswrapper[4652]: I1205 06:16:55.813604 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4r59s"]
Dec 05 06:16:55 crc kubenswrapper[4652]: E1205 06:16:55.814677 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="203fac7a-698a-4fbe-9bce-f6e4704073a3" containerName="collect-profiles"
Dec 05 06:16:55 crc kubenswrapper[4652]: I1205 06:16:55.814691 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="203fac7a-698a-4fbe-9bce-f6e4704073a3" containerName="collect-profiles"
Dec 05 06:16:55 crc kubenswrapper[4652]: I1205 06:16:55.814879 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="203fac7a-698a-4fbe-9bce-f6e4704073a3" containerName="collect-profiles"
Dec 05 06:16:55 crc kubenswrapper[4652]: I1205 06:16:55.817418 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:55 crc kubenswrapper[4652]: I1205 06:16:55.828908 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4r59s"]
Dec 05 06:16:55 crc kubenswrapper[4652]: I1205 06:16:55.911295 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-utilities\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:55 crc kubenswrapper[4652]: I1205 06:16:55.911446 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-catalog-content\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:55 crc kubenswrapper[4652]: I1205 06:16:55.911524 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kwcj\" (UniqueName: \"kubernetes.io/projected/0216f080-70d5-4847-bea5-b9ed893d3cd1-kube-api-access-8kwcj\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.011299 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rjvjs"]
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.013207 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-utilities\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.013310 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-catalog-content\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.013356 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kwcj\" (UniqueName: \"kubernetes.io/projected/0216f080-70d5-4847-bea5-b9ed893d3cd1-kube-api-access-8kwcj\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.013430 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.014100 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-utilities\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.014432 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-catalog-content\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.020148 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rjvjs"]
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.032926 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kwcj\" (UniqueName: \"kubernetes.io/projected/0216f080-70d5-4847-bea5-b9ed893d3cd1-kube-api-access-8kwcj\") pod \"redhat-marketplace-4r59s\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") " pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.115347 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-catalog-content\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.115762 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-utilities\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.115876 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bs5d\" (UniqueName: \"kubernetes.io/projected/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-kube-api-access-2bs5d\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.141365 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.220465 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-utilities\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.220758 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bs5d\" (UniqueName: \"kubernetes.io/projected/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-kube-api-access-2bs5d\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.220910 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-catalog-content\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.221186 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-utilities\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.222254 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-catalog-content\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.257522 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bs5d\" (UniqueName: \"kubernetes.io/projected/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-kube-api-access-2bs5d\") pod \"community-operators-rjvjs\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") " pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.371237 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rjvjs"
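[editor's note] The VerifyControllerAttachedVolume -> MountVolume started -> MountVolume.SetUp succeeded progression above is the kubelet volume manager reconciling its desired state (volumes the scheduled pods need) against its actual state (volumes currently mounted). A simplified Go sketch of that reconcile shape, with hypothetical types; the real reconciler sits behind reconciler_common.go and operation_generator.go and is considerably more involved.

    package main

    import "fmt"

    // volume is a hypothetical stand-in for a (pod, volume) pair in the
    // desired-state-of-world.
    type volume struct{ pod, name string }

    // reconcile mounts every desired volume not yet in the actual state;
    // a failed mount is simply retried on the next reconciliation pass.
    func reconcile(desired []volume, actual map[volume]bool, mount func(volume) error) {
        for _, v := range desired {
            if actual[v] {
                continue // already mounted
            }
            if err := mount(v); err != nil {
                continue // will be retried
            }
            actual[v] = true // "MountVolume.SetUp succeeded"
        }
    }

    func main() {
        actual := map[volume]bool{}
        desired := []volume{
            {"redhat-marketplace-4r59s", "utilities"},
            {"redhat-marketplace-4r59s", "catalog-content"},
        }
        reconcile(desired, actual, func(v volume) error { fmt.Println("mounted", v); return nil })
    }

This loop structure is why the log shows the same volume names cycling through "started" and "succeeded" states rather than a single mount call per pod.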
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.569483 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4r59s"]
Dec 05 06:16:56 crc kubenswrapper[4652]: I1205 06:16:56.829983 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rjvjs"]
Dec 05 06:16:57 crc kubenswrapper[4652]: I1205 06:16:57.036288 4652 generic.go:334] "Generic (PLEG): container finished" podID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerID="a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6" exitCode=0
Dec 05 06:16:57 crc kubenswrapper[4652]: I1205 06:16:57.036389 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4r59s" event={"ID":"0216f080-70d5-4847-bea5-b9ed893d3cd1","Type":"ContainerDied","Data":"a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6"}
Dec 05 06:16:57 crc kubenswrapper[4652]: I1205 06:16:57.036444 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4r59s" event={"ID":"0216f080-70d5-4847-bea5-b9ed893d3cd1","Type":"ContainerStarted","Data":"573963f2fc754c12cbf4fb4a17f02870efc819007277a0c944d62048ebfe8b0a"}
Dec 05 06:16:57 crc kubenswrapper[4652]: I1205 06:16:57.038293 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 06:16:57 crc kubenswrapper[4652]: I1205 06:16:57.038808 4652 generic.go:334] "Generic (PLEG): container finished" podID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerID="42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9" exitCode=0
Dec 05 06:16:57 crc kubenswrapper[4652]: I1205 06:16:57.038841 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjvjs" event={"ID":"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562","Type":"ContainerDied","Data":"42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9"}
Dec 05 06:16:57 crc kubenswrapper[4652]: I1205 06:16:57.038869 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjvjs" event={"ID":"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562","Type":"ContainerStarted","Data":"3f5dcb9dfa011ef486c4c9e0d18495f084118b7edb69fa09de9d61bf5d980f85"}
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.049289 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4r59s" event={"ID":"0216f080-70d5-4847-bea5-b9ed893d3cd1","Type":"ContainerStarted","Data":"0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee"}
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.052188 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjvjs" event={"ID":"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562","Type":"ContainerStarted","Data":"885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1"}
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.211353 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2q8qs"]
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.213791 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.224085 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2q8qs"]
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.262958 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-catalog-content\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.263003 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m86w\" (UniqueName: \"kubernetes.io/projected/b7cddbe2-3da7-451a-a775-551708dabb54-kube-api-access-2m86w\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.263550 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-utilities\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.365702 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-catalog-content\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.365752 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m86w\" (UniqueName: \"kubernetes.io/projected/b7cddbe2-3da7-451a-a775-551708dabb54-kube-api-access-2m86w\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.366043 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-utilities\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.366255 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-catalog-content\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.366476 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-utilities\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.384347 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m86w\" (UniqueName: \"kubernetes.io/projected/b7cddbe2-3da7-451a-a775-551708dabb54-kube-api-access-2m86w\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs"
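[editor's note] Each "SyncLoop (PLEG): event for pod" line above relays a pod lifecycle event from the container runtime into the kubelet sync loop; the exitCode=0 "container finished" entries are the catalog pods' short-lived content-extraction containers completing before registry-server starts. The event={"ID":...,"Type":...,"Data":...} dump corresponds to a struct of roughly this shape (field names taken from the log; a simplified rendering, not necessarily kubelet's exact definition):

    package main

    import "fmt"

    type PodLifecycleEventType string

    const (
        ContainerStarted PodLifecycleEventType = "ContainerStarted"
        ContainerDied    PodLifecycleEventType = "ContainerDied"
    )

    // PodLifecycleEvent mirrors the event={"ID":...,"Type":...,"Data":...}
    // dumps: ID is the pod UID, Data the container (or sandbox) ID involved.
    type PodLifecycleEvent struct {
        ID   string
        Type PodLifecycleEventType
        Data interface{}
    }

    func main() {
        e := PodLifecycleEvent{
            ID:   "0216f080-70d5-4847-bea5-b9ed893d3cd1",
            Type: ContainerDied,
            Data: "a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6",
        }
        fmt.Printf("%+v\n", e)
    }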
"MountVolume.SetUp succeeded for volume \"kube-api-access-2m86w\" (UniqueName: \"kubernetes.io/projected/b7cddbe2-3da7-451a-a775-551708dabb54-kube-api-access-2m86w\") pod \"certified-operators-2q8qs\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") " pod="openshift-marketplace/certified-operators-2q8qs" Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.533427 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2q8qs" Dec 05 06:16:58 crc kubenswrapper[4652]: W1205 06:16:58.978354 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7cddbe2_3da7_451a_a775_551708dabb54.slice/crio-cab85e190804e7e17f98bf6f01758d57d78b9ee22691ac2c7b709d5e2d95f13f WatchSource:0}: Error finding container cab85e190804e7e17f98bf6f01758d57d78b9ee22691ac2c7b709d5e2d95f13f: Status 404 returned error can't find the container with id cab85e190804e7e17f98bf6f01758d57d78b9ee22691ac2c7b709d5e2d95f13f Dec 05 06:16:58 crc kubenswrapper[4652]: I1205 06:16:58.978907 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2q8qs"] Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.062033 4652 generic.go:334] "Generic (PLEG): container finished" podID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerID="885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1" exitCode=0 Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.062142 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjvjs" event={"ID":"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562","Type":"ContainerDied","Data":"885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1"} Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.064172 4652 generic.go:334] "Generic (PLEG): container finished" podID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerID="0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee" exitCode=0 Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.064241 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4r59s" event={"ID":"0216f080-70d5-4847-bea5-b9ed893d3cd1","Type":"ContainerDied","Data":"0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee"} Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.066241 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2q8qs" event={"ID":"b7cddbe2-3da7-451a-a775-551708dabb54","Type":"ContainerStarted","Data":"cab85e190804e7e17f98bf6f01758d57d78b9ee22691ac2c7b709d5e2d95f13f"} Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.126433 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:16:59 crc kubenswrapper[4652]: E1205 06:16:59.126749 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.207690 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2gz7j"] Dec 05 06:16:59 crc 
kubenswrapper[4652]: I1205 06:16:59.210519 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.224172 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2gz7j"] Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.285025 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgcjq\" (UniqueName: \"kubernetes.io/projected/fd737f0c-26b5-4182-b46f-04f6d2b804d5-kube-api-access-wgcjq\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.285120 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-catalog-content\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.285283 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-utilities\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.387006 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-utilities\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.387123 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgcjq\" (UniqueName: \"kubernetes.io/projected/fd737f0c-26b5-4182-b46f-04f6d2b804d5-kube-api-access-wgcjq\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.387171 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-catalog-content\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.387467 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-utilities\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.387483 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-catalog-content\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 
06:16:59.412511 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgcjq\" (UniqueName: \"kubernetes.io/projected/fd737f0c-26b5-4182-b46f-04f6d2b804d5-kube-api-access-wgcjq\") pod \"redhat-operators-2gz7j\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:16:59 crc kubenswrapper[4652]: I1205 06:16:59.562925 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:17:00 crc kubenswrapper[4652]: I1205 06:17:00.031263 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2gz7j"] Dec 05 06:17:00 crc kubenswrapper[4652]: W1205 06:17:00.032664 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd737f0c_26b5_4182_b46f_04f6d2b804d5.slice/crio-d6c094e421177f360879899f4fb3c26e3b906c7f54301ed9d693150a88c398ff WatchSource:0}: Error finding container d6c094e421177f360879899f4fb3c26e3b906c7f54301ed9d693150a88c398ff: Status 404 returned error can't find the container with id d6c094e421177f360879899f4fb3c26e3b906c7f54301ed9d693150a88c398ff Dec 05 06:17:00 crc kubenswrapper[4652]: I1205 06:17:00.077072 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjvjs" event={"ID":"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562","Type":"ContainerStarted","Data":"d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052"} Dec 05 06:17:00 crc kubenswrapper[4652]: I1205 06:17:00.081283 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4r59s" event={"ID":"0216f080-70d5-4847-bea5-b9ed893d3cd1","Type":"ContainerStarted","Data":"143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6"} Dec 05 06:17:00 crc kubenswrapper[4652]: I1205 06:17:00.084865 4652 generic.go:334] "Generic (PLEG): container finished" podID="b7cddbe2-3da7-451a-a775-551708dabb54" containerID="61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909" exitCode=0 Dec 05 06:17:00 crc kubenswrapper[4652]: I1205 06:17:00.084919 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2q8qs" event={"ID":"b7cddbe2-3da7-451a-a775-551708dabb54","Type":"ContainerDied","Data":"61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909"} Dec 05 06:17:00 crc kubenswrapper[4652]: I1205 06:17:00.093542 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gz7j" event={"ID":"fd737f0c-26b5-4182-b46f-04f6d2b804d5","Type":"ContainerStarted","Data":"d6c094e421177f360879899f4fb3c26e3b906c7f54301ed9d693150a88c398ff"} Dec 05 06:17:00 crc kubenswrapper[4652]: I1205 06:17:00.100549 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rjvjs" podStartSLOduration=2.473479981 podStartE2EDuration="5.100534655s" podCreationTimestamp="2025-12-05 06:16:55 +0000 UTC" firstStartedPulling="2025-12-05 06:16:57.040427838 +0000 UTC m=+3019.277158104" lastFinishedPulling="2025-12-05 06:16:59.66748252 +0000 UTC m=+3021.904212778" observedRunningTime="2025-12-05 06:17:00.09509564 +0000 UTC m=+3022.331825908" watchObservedRunningTime="2025-12-05 06:17:00.100534655 +0000 UTC m=+3022.337264923" Dec 05 06:17:00 crc kubenswrapper[4652]: I1205 06:17:00.146090 4652 pod_startup_latency_tracker.go:104] "Observed pod startup 
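[editor's note] The pod_startup_latency_tracker entry above reports two durations: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally excludes the image-pull window (lastFinishedPulling minus firstStartedPulling). The numbers in the community-operators-rjvjs entry check out; a few lines of Go reproducing the arithmetic from its monotonic (m=+...) offsets:

    package main

    import "fmt"

    func main() {
        // Offsets taken verbatim from the entry above.
        firstStartedPulling := 3019.277158104
        lastFinishedPulling := 3021.904212778
        e2e := 5.100534655 // podStartE2EDuration, seconds

        pull := lastFinishedPulling - firstStartedPulling
        slo := e2e - pull
        // slo prints as 2.473479981, matching podStartSLOduration.
        fmt.Printf("pull=%.9f slo=%.9f\n", pull, slo)
    }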
duration" pod="openshift-marketplace/redhat-marketplace-4r59s" podStartSLOduration=2.637103392 podStartE2EDuration="5.146073945s" podCreationTimestamp="2025-12-05 06:16:55 +0000 UTC" firstStartedPulling="2025-12-05 06:16:57.037849531 +0000 UTC m=+3019.274579798" lastFinishedPulling="2025-12-05 06:16:59.546820083 +0000 UTC m=+3021.783550351" observedRunningTime="2025-12-05 06:17:00.138425517 +0000 UTC m=+3022.375155783" watchObservedRunningTime="2025-12-05 06:17:00.146073945 +0000 UTC m=+3022.382804213" Dec 05 06:17:01 crc kubenswrapper[4652]: I1205 06:17:01.105803 4652 generic.go:334] "Generic (PLEG): container finished" podID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerID="69ab87e93c32b9f6a55ae1a99f9e603d1aa5eb0256edd5a85b3206f0644e56b3" exitCode=0 Dec 05 06:17:01 crc kubenswrapper[4652]: I1205 06:17:01.105911 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gz7j" event={"ID":"fd737f0c-26b5-4182-b46f-04f6d2b804d5","Type":"ContainerDied","Data":"69ab87e93c32b9f6a55ae1a99f9e603d1aa5eb0256edd5a85b3206f0644e56b3"} Dec 05 06:17:01 crc kubenswrapper[4652]: I1205 06:17:01.108073 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2q8qs" event={"ID":"b7cddbe2-3da7-451a-a775-551708dabb54","Type":"ContainerStarted","Data":"1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8"} Dec 05 06:17:02 crc kubenswrapper[4652]: I1205 06:17:02.118373 4652 generic.go:334] "Generic (PLEG): container finished" podID="b7cddbe2-3da7-451a-a775-551708dabb54" containerID="1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8" exitCode=0 Dec 05 06:17:02 crc kubenswrapper[4652]: I1205 06:17:02.118491 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2q8qs" event={"ID":"b7cddbe2-3da7-451a-a775-551708dabb54","Type":"ContainerDied","Data":"1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8"} Dec 05 06:17:02 crc kubenswrapper[4652]: I1205 06:17:02.122315 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gz7j" event={"ID":"fd737f0c-26b5-4182-b46f-04f6d2b804d5","Type":"ContainerStarted","Data":"023296c7d2c68bdd3b7f51b3c68879ab99aefd0025078a77eb034e9b75a7b7de"} Dec 05 06:17:03 crc kubenswrapper[4652]: I1205 06:17:03.143927 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2q8qs" event={"ID":"b7cddbe2-3da7-451a-a775-551708dabb54","Type":"ContainerStarted","Data":"d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903"} Dec 05 06:17:03 crc kubenswrapper[4652]: I1205 06:17:03.166171 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2q8qs" podStartSLOduration=2.564034338 podStartE2EDuration="5.166153455s" podCreationTimestamp="2025-12-05 06:16:58 +0000 UTC" firstStartedPulling="2025-12-05 06:17:00.091891276 +0000 UTC m=+3022.328621542" lastFinishedPulling="2025-12-05 06:17:02.694010392 +0000 UTC m=+3024.930740659" observedRunningTime="2025-12-05 06:17:03.163987022 +0000 UTC m=+3025.400717289" watchObservedRunningTime="2025-12-05 06:17:03.166153455 +0000 UTC m=+3025.402883722" Dec 05 06:17:04 crc kubenswrapper[4652]: I1205 06:17:04.153445 4652 generic.go:334] "Generic (PLEG): container finished" podID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerID="023296c7d2c68bdd3b7f51b3c68879ab99aefd0025078a77eb034e9b75a7b7de" exitCode=0 Dec 05 06:17:04 crc 
kubenswrapper[4652]: I1205 06:17:04.153508 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gz7j" event={"ID":"fd737f0c-26b5-4182-b46f-04f6d2b804d5","Type":"ContainerDied","Data":"023296c7d2c68bdd3b7f51b3c68879ab99aefd0025078a77eb034e9b75a7b7de"} Dec 05 06:17:05 crc kubenswrapper[4652]: I1205 06:17:05.164974 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gz7j" event={"ID":"fd737f0c-26b5-4182-b46f-04f6d2b804d5","Type":"ContainerStarted","Data":"c7decf6ad4786bda68090a3803b00ecb47c5cfcb0cdc86dadee280cb33935e62"} Dec 05 06:17:05 crc kubenswrapper[4652]: I1205 06:17:05.186095 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2gz7j" podStartSLOduration=2.577506209 podStartE2EDuration="6.186079341s" podCreationTimestamp="2025-12-05 06:16:59 +0000 UTC" firstStartedPulling="2025-12-05 06:17:01.107047823 +0000 UTC m=+3023.343778090" lastFinishedPulling="2025-12-05 06:17:04.715620955 +0000 UTC m=+3026.952351222" observedRunningTime="2025-12-05 06:17:05.178815636 +0000 UTC m=+3027.415545903" watchObservedRunningTime="2025-12-05 06:17:05.186079341 +0000 UTC m=+3027.422809608" Dec 05 06:17:06 crc kubenswrapper[4652]: I1205 06:17:06.141658 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4r59s" Dec 05 06:17:06 crc kubenswrapper[4652]: I1205 06:17:06.141711 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4r59s" Dec 05 06:17:06 crc kubenswrapper[4652]: I1205 06:17:06.181463 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4r59s" Dec 05 06:17:06 crc kubenswrapper[4652]: I1205 06:17:06.217379 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4r59s" Dec 05 06:17:06 crc kubenswrapper[4652]: I1205 06:17:06.372381 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rjvjs" Dec 05 06:17:06 crc kubenswrapper[4652]: I1205 06:17:06.372424 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rjvjs" Dec 05 06:17:06 crc kubenswrapper[4652]: I1205 06:17:06.406743 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rjvjs" Dec 05 06:17:07 crc kubenswrapper[4652]: I1205 06:17:07.227763 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rjvjs" Dec 05 06:17:08 crc kubenswrapper[4652]: I1205 06:17:08.533802 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2q8qs" Dec 05 06:17:08 crc kubenswrapper[4652]: I1205 06:17:08.534059 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2q8qs" Dec 05 06:17:08 crc kubenswrapper[4652]: I1205 06:17:08.568103 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2q8qs" Dec 05 06:17:08 crc kubenswrapper[4652]: I1205 06:17:08.602923 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4r59s"] Dec 05 06:17:08 crc kubenswrapper[4652]: 
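[editor's note] An API DELETE ("SyncLoop DELETE" above) is followed by "Killing container with a grace period": the runtime signals the container and is given gracePeriod seconds (2 here, the marketplace pods' terminationGracePeriodSeconds) to exit before being force-killed. A simplified Go sketch of that escalation; stop and forceKill are illustrative parameters standing in for the runtime calls.

    package main

    import (
        "fmt"
        "time"
    )

    // killWithGrace asks the runtime to stop a container (SIGTERM), then
    // escalates (SIGKILL) if it has not exited within the grace period.
    func killWithGrace(stop func() error, forceKill func(), grace time.Duration) {
        done := make(chan error, 1)
        go func() { done <- stop() }()
        select {
        case <-done:
            // exited on its own within the grace period
        case <-time.After(grace):
            forceKill()
        }
    }

    func main() {
        killWithGrace(
            func() error { time.Sleep(50 * time.Millisecond); return nil },
            func() { fmt.Println("force kill") },
            2*time.Second, // gracePeriod=2 as in the log
        )
    }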
Dec 05 06:17:08 crc kubenswrapper[4652]: I1205 06:17:08.603135 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4r59s" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerName="registry-server" containerID="cri-o://143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6" gracePeriod=2
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.016682 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.091277 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-catalog-content\") pod \"0216f080-70d5-4847-bea5-b9ed893d3cd1\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") "
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.091426 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-utilities\") pod \"0216f080-70d5-4847-bea5-b9ed893d3cd1\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") "
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.091670 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kwcj\" (UniqueName: \"kubernetes.io/projected/0216f080-70d5-4847-bea5-b9ed893d3cd1-kube-api-access-8kwcj\") pod \"0216f080-70d5-4847-bea5-b9ed893d3cd1\" (UID: \"0216f080-70d5-4847-bea5-b9ed893d3cd1\") "
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.092086 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-utilities" (OuterVolumeSpecName: "utilities") pod "0216f080-70d5-4847-bea5-b9ed893d3cd1" (UID: "0216f080-70d5-4847-bea5-b9ed893d3cd1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.092578 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.098292 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0216f080-70d5-4847-bea5-b9ed893d3cd1-kube-api-access-8kwcj" (OuterVolumeSpecName: "kube-api-access-8kwcj") pod "0216f080-70d5-4847-bea5-b9ed893d3cd1" (UID: "0216f080-70d5-4847-bea5-b9ed893d3cd1"). InnerVolumeSpecName "kube-api-access-8kwcj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.106044 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0216f080-70d5-4847-bea5-b9ed893d3cd1" (UID: "0216f080-70d5-4847-bea5-b9ed893d3cd1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.194843 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kwcj\" (UniqueName: \"kubernetes.io/projected/0216f080-70d5-4847-bea5-b9ed893d3cd1-kube-api-access-8kwcj\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.195024 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0216f080-70d5-4847-bea5-b9ed893d3cd1-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.210950 4652 generic.go:334] "Generic (PLEG): container finished" podID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerID="143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6" exitCode=0
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.211006 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4r59s"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.211056 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4r59s" event={"ID":"0216f080-70d5-4847-bea5-b9ed893d3cd1","Type":"ContainerDied","Data":"143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6"}
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.211118 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4r59s" event={"ID":"0216f080-70d5-4847-bea5-b9ed893d3cd1","Type":"ContainerDied","Data":"573963f2fc754c12cbf4fb4a17f02870efc819007277a0c944d62048ebfe8b0a"}
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.211139 4652 scope.go:117] "RemoveContainer" containerID="143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.246314 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4r59s"]
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.248070 4652 scope.go:117] "RemoveContainer" containerID="0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.256072 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4r59s"]
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.259671 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.267131 4652 scope.go:117] "RemoveContainer" containerID="a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.316307 4652 scope.go:117] "RemoveContainer" containerID="143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6"
Dec 05 06:17:09 crc kubenswrapper[4652]: E1205 06:17:09.316856 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6\": container with ID starting with 143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6 not found: ID does not exist" containerID="143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.316889 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6"} err="failed to get container status \"143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6\": rpc error: code = NotFound desc = could not find container \"143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6\": container with ID starting with 143dbf5022bdef836df2e994b747b64eaa10a9ea2a6b5c7da6eac4614fb16fa6 not found: ID does not exist"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.316911 4652 scope.go:117] "RemoveContainer" containerID="0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee"
Dec 05 06:17:09 crc kubenswrapper[4652]: E1205 06:17:09.317230 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee\": container with ID starting with 0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee not found: ID does not exist" containerID="0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.317267 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee"} err="failed to get container status \"0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee\": rpc error: code = NotFound desc = could not find container \"0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee\": container with ID starting with 0a7e32759ac783451a474b7934fcbba5539456794c66a9e72c28888b0e2918ee not found: ID does not exist"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.317295 4652 scope.go:117] "RemoveContainer" containerID="a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6"
Dec 05 06:17:09 crc kubenswrapper[4652]: E1205 06:17:09.317622 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6\": container with ID starting with a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6 not found: ID does not exist" containerID="a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.317647 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6"} err="failed to get container status \"a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6\": rpc error: code = NotFound desc = could not find container \"a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6\": container with ID starting with a5f80dc1a30ddc7f52cbcf0528aa6a33461282456086dba33c2871fb07961bb6 not found: ID does not exist"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.564433 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2gz7j"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.564493 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2gz7j"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.600110 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2gz7j"
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.605030 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rjvjs"]
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.605304 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rjvjs" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerName="registry-server" containerID="cri-o://d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052" gracePeriod=2
Dec 05 06:17:09 crc kubenswrapper[4652]: I1205 06:17:09.985526 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.012221 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-utilities\") pod \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") "
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.012283 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bs5d\" (UniqueName: \"kubernetes.io/projected/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-kube-api-access-2bs5d\") pod \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") "
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.012325 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-catalog-content\") pod \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\" (UID: \"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562\") "
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.012794 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-utilities" (OuterVolumeSpecName: "utilities") pod "c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" (UID: "c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.016978 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-kube-api-access-2bs5d" (OuterVolumeSpecName: "kube-api-access-2bs5d") pod "c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" (UID: "c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562"). InnerVolumeSpecName "kube-api-access-2bs5d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.048285 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" (UID: "c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.114358 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.114387 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bs5d\" (UniqueName: \"kubernetes.io/projected/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-kube-api-access-2bs5d\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.114397 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.125552 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79"
Dec 05 06:17:10 crc kubenswrapper[4652]: E1205 06:17:10.125865 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.136037 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" path="/var/lib/kubelet/pods/0216f080-70d5-4847-bea5-b9ed893d3cd1/volumes"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.222905 4652 generic.go:334] "Generic (PLEG): container finished" podID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerID="d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052" exitCode=0
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.222974 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rjvjs"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.223006 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjvjs" event={"ID":"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562","Type":"ContainerDied","Data":"d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052"}
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.223066 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rjvjs" event={"ID":"c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562","Type":"ContainerDied","Data":"3f5dcb9dfa011ef486c4c9e0d18495f084118b7edb69fa09de9d61bf5d980f85"}
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.223091 4652 scope.go:117] "RemoveContainer" containerID="d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.245017 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rjvjs"]
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.246732 4652 scope.go:117] "RemoveContainer" containerID="885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.251541 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rjvjs"]
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.263614 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2gz7j"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.265093 4652 scope.go:117] "RemoveContainer" containerID="42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.285192 4652 scope.go:117] "RemoveContainer" containerID="d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052"
Dec 05 06:17:10 crc kubenswrapper[4652]: E1205 06:17:10.285723 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052\": container with ID starting with d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052 not found: ID does not exist" containerID="d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.285763 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052"} err="failed to get container status \"d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052\": rpc error: code = NotFound desc = could not find container \"d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052\": container with ID starting with d849c69f6cb20916eb90eb764aeff23accbd38626781b268dca7fcc472fe6052 not found: ID does not exist"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.285792 4652 scope.go:117] "RemoveContainer" containerID="885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1"
Dec 05 06:17:10 crc kubenswrapper[4652]: E1205 06:17:10.286120 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1\": container with ID starting with 885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1 not found: ID does not exist" containerID="885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.286153 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1"} err="failed to get container status \"885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1\": rpc error: code = NotFound desc = could not find container \"885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1\": container with ID starting with 885100ce43eea3ffd77acff0bf0f91eff8fa6dd6c3f5cb3a97625985bae304e1 not found: ID does not exist"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.286177 4652 scope.go:117] "RemoveContainer" containerID="42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9"
Dec 05 06:17:10 crc kubenswrapper[4652]: E1205 06:17:10.286902 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9\": container with ID starting with 42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9 not found: ID does not exist" containerID="42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9"
Dec 05 06:17:10 crc kubenswrapper[4652]: I1205 06:17:10.286984 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9"} err="failed to get container status \"42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9\": rpc error: code = NotFound desc = could not find container \"42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9\": container with ID starting with 42401b06bcd53c22fa57f2131d5ad8abd38535ba817234b0304c11ec5520cdc9 not found: ID does not exist"
Dec 05 06:17:11 crc kubenswrapper[4652]: I1205 06:17:11.005536 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2q8qs"]
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.134776 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" path="/var/lib/kubelet/pods/c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562/volumes"
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.241263 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2q8qs" podUID="b7cddbe2-3da7-451a-a775-551708dabb54" containerName="registry-server" containerID="cri-o://d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903" gracePeriod=2
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.622433 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2q8qs"
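[editor's note] Teardown above mirrors setup in reverse: UnmountVolume started -> UnmountVolume.TearDown succeeded -> "Volume detached", and only after every volume is gone does kubelet_volumes.go report "Cleaned up orphaned pod volumes dir". A compact Go sketch of that ordering; unmount and removeDir are illustrative callbacks standing in for the real operations.

    package main

    import "fmt"

    // teardownPod unmounts each volume before removing the pod's volumes
    // directory; any unmount failure leaves the directory for a later retry.
    func teardownPod(volumes []string, unmount func(string) error, removeDir func() error) error {
        for _, v := range volumes {
            if err := unmount(v); err != nil {
                return err
            }
            fmt.Println("Volume detached:", v)
        }
        return removeDir() // "Cleaned up orphaned pod volumes dir"
    }

    func main() {
        vols := []string{"utilities", "kube-api-access-2m86w", "catalog-content"}
        _ = teardownPod(vols, func(string) error { return nil }, func() error { return nil })
    }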
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.670651 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m86w\" (UniqueName: \"kubernetes.io/projected/b7cddbe2-3da7-451a-a775-551708dabb54-kube-api-access-2m86w\") pod \"b7cddbe2-3da7-451a-a775-551708dabb54\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") "
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.670791 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-catalog-content\") pod \"b7cddbe2-3da7-451a-a775-551708dabb54\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") "
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.670862 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-utilities\") pod \"b7cddbe2-3da7-451a-a775-551708dabb54\" (UID: \"b7cddbe2-3da7-451a-a775-551708dabb54\") "
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.671492 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-utilities" (OuterVolumeSpecName: "utilities") pod "b7cddbe2-3da7-451a-a775-551708dabb54" (UID: "b7cddbe2-3da7-451a-a775-551708dabb54"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.671673 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.676646 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7cddbe2-3da7-451a-a775-551708dabb54-kube-api-access-2m86w" (OuterVolumeSpecName: "kube-api-access-2m86w") pod "b7cddbe2-3da7-451a-a775-551708dabb54" (UID: "b7cddbe2-3da7-451a-a775-551708dabb54"). InnerVolumeSpecName "kube-api-access-2m86w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.706697 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7cddbe2-3da7-451a-a775-551708dabb54" (UID: "b7cddbe2-3da7-451a-a775-551708dabb54"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.773786 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m86w\" (UniqueName: \"kubernetes.io/projected/b7cddbe2-3da7-451a-a775-551708dabb54-kube-api-access-2m86w\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:12 crc kubenswrapper[4652]: I1205 06:17:12.773817 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7cddbe2-3da7-451a-a775-551708dabb54-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.253901 4652 generic.go:334] "Generic (PLEG): container finished" podID="b7cddbe2-3da7-451a-a775-551708dabb54" containerID="d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903" exitCode=0
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.253958 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2q8qs"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.253986 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2q8qs" event={"ID":"b7cddbe2-3da7-451a-a775-551708dabb54","Type":"ContainerDied","Data":"d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903"}
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.254909 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2q8qs" event={"ID":"b7cddbe2-3da7-451a-a775-551708dabb54","Type":"ContainerDied","Data":"cab85e190804e7e17f98bf6f01758d57d78b9ee22691ac2c7b709d5e2d95f13f"}
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.254977 4652 scope.go:117] "RemoveContainer" containerID="d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.272093 4652 scope.go:117] "RemoveContainer" containerID="1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.295908 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2q8qs"]
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.300490 4652 scope.go:117] "RemoveContainer" containerID="61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.304468 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2q8qs"]
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.331345 4652 scope.go:117] "RemoveContainer" containerID="d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903"
Dec 05 06:17:13 crc kubenswrapper[4652]: E1205 06:17:13.331647 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903\": container with ID starting with d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903 not found: ID does not exist" containerID="d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.331688 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903"} err="failed to get container status \"d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903\": rpc error: code = NotFound desc = could not find container \"d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903\": container with ID starting with d8a476921b464e29f4513fc159b97e4f2f505aeaae04f5a187d335ef321fe903 not found: ID does not exist"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.331717 4652 scope.go:117] "RemoveContainer" containerID="1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8"
Dec 05 06:17:13 crc kubenswrapper[4652]: E1205 06:17:13.331978 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8\": container with ID starting with 1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8 not found: ID does not exist" containerID="1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.332005 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8"} err="failed to get container status \"1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8\": rpc error: code = NotFound desc = could not find container \"1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8\": container with ID starting with 1306f39f401239fac5c0640a6008d51da9e65a2d9f6578721dd79513e98e58a8 not found: ID does not exist"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.332021 4652 scope.go:117] "RemoveContainer" containerID="61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909"
Dec 05 06:17:13 crc kubenswrapper[4652]: E1205 06:17:13.333154 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909\": container with ID starting with 61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909 not found: ID does not exist" containerID="61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909"
Dec 05 06:17:13 crc kubenswrapper[4652]: I1205 06:17:13.333196 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909"} err="failed to get container status \"61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909\": rpc error: code = NotFound desc = could not find container \"61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909\": container with ID starting with 61a44ef98e8bcc265b95a28a63d591de23c734c5d9493eb7752cc10cf1a1e909 not found: ID does not exist"
Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.002151 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2gz7j"]
Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.002789 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2gz7j" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerName="registry-server" containerID="cri-o://c7decf6ad4786bda68090a3803b00ecb47c5cfcb0cdc86dadee280cb33935e62" gracePeriod=2
Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.135014 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7cddbe2-3da7-451a-a775-551708dabb54"
path="/var/lib/kubelet/pods/b7cddbe2-3da7-451a-a775-551708dabb54/volumes" Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.264158 4652 generic.go:334] "Generic (PLEG): container finished" podID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerID="c7decf6ad4786bda68090a3803b00ecb47c5cfcb0cdc86dadee280cb33935e62" exitCode=0 Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.264242 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gz7j" event={"ID":"fd737f0c-26b5-4182-b46f-04f6d2b804d5","Type":"ContainerDied","Data":"c7decf6ad4786bda68090a3803b00ecb47c5cfcb0cdc86dadee280cb33935e62"} Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.381869 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.407252 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-utilities\") pod \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.407337 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgcjq\" (UniqueName: \"kubernetes.io/projected/fd737f0c-26b5-4182-b46f-04f6d2b804d5-kube-api-access-wgcjq\") pod \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.407368 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-catalog-content\") pod \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\" (UID: \"fd737f0c-26b5-4182-b46f-04f6d2b804d5\") " Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.407964 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-utilities" (OuterVolumeSpecName: "utilities") pod "fd737f0c-26b5-4182-b46f-04f6d2b804d5" (UID: "fd737f0c-26b5-4182-b46f-04f6d2b804d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.409198 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.412255 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd737f0c-26b5-4182-b46f-04f6d2b804d5-kube-api-access-wgcjq" (OuterVolumeSpecName: "kube-api-access-wgcjq") pod "fd737f0c-26b5-4182-b46f-04f6d2b804d5" (UID: "fd737f0c-26b5-4182-b46f-04f6d2b804d5"). InnerVolumeSpecName "kube-api-access-wgcjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.490675 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd737f0c-26b5-4182-b46f-04f6d2b804d5" (UID: "fd737f0c-26b5-4182-b46f-04f6d2b804d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.510425 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgcjq\" (UniqueName: \"kubernetes.io/projected/fd737f0c-26b5-4182-b46f-04f6d2b804d5-kube-api-access-wgcjq\") on node \"crc\" DevicePath \"\"" Dec 05 06:17:14 crc kubenswrapper[4652]: I1205 06:17:14.510578 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd737f0c-26b5-4182-b46f-04f6d2b804d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:17:15 crc kubenswrapper[4652]: I1205 06:17:15.277073 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gz7j" event={"ID":"fd737f0c-26b5-4182-b46f-04f6d2b804d5","Type":"ContainerDied","Data":"d6c094e421177f360879899f4fb3c26e3b906c7f54301ed9d693150a88c398ff"} Dec 05 06:17:15 crc kubenswrapper[4652]: I1205 06:17:15.277326 4652 scope.go:117] "RemoveContainer" containerID="c7decf6ad4786bda68090a3803b00ecb47c5cfcb0cdc86dadee280cb33935e62" Dec 05 06:17:15 crc kubenswrapper[4652]: I1205 06:17:15.277123 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2gz7j" Dec 05 06:17:15 crc kubenswrapper[4652]: I1205 06:17:15.294753 4652 scope.go:117] "RemoveContainer" containerID="023296c7d2c68bdd3b7f51b3c68879ab99aefd0025078a77eb034e9b75a7b7de" Dec 05 06:17:15 crc kubenswrapper[4652]: I1205 06:17:15.306651 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2gz7j"] Dec 05 06:17:15 crc kubenswrapper[4652]: I1205 06:17:15.315310 4652 scope.go:117] "RemoveContainer" containerID="69ab87e93c32b9f6a55ae1a99f9e603d1aa5eb0256edd5a85b3206f0644e56b3" Dec 05 06:17:15 crc kubenswrapper[4652]: I1205 06:17:15.316971 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2gz7j"] Dec 05 06:17:16 crc kubenswrapper[4652]: I1205 06:17:16.135866 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" path="/var/lib/kubelet/pods/fd737f0c-26b5-4182-b46f-04f6d2b804d5/volumes" Dec 05 06:17:21 crc kubenswrapper[4652]: I1205 06:17:21.125827 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:17:21 crc kubenswrapper[4652]: E1205 06:17:21.126983 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:17:36 crc kubenswrapper[4652]: I1205 06:17:36.126022 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:17:36 crc kubenswrapper[4652]: E1205 06:17:36.127015 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" 
podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:17:48 crc kubenswrapper[4652]: I1205 06:17:48.130577 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:17:48 crc kubenswrapper[4652]: E1205 06:17:48.131367 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:18:00 crc kubenswrapper[4652]: I1205 06:18:00.125856 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:18:00 crc kubenswrapper[4652]: E1205 06:18:00.126319 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:18:11 crc kubenswrapper[4652]: I1205 06:18:11.125428 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:18:11 crc kubenswrapper[4652]: E1205 06:18:11.126348 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:18:25 crc kubenswrapper[4652]: I1205 06:18:25.126111 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:18:25 crc kubenswrapper[4652]: E1205 06:18:25.126990 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:18:39 crc kubenswrapper[4652]: I1205 06:18:39.126260 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:18:39 crc kubenswrapper[4652]: E1205 06:18:39.127329 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:18:50 crc kubenswrapper[4652]: I1205 06:18:50.125963 4652 scope.go:117] "RemoveContainer" 
containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:18:50 crc kubenswrapper[4652]: E1205 06:18:50.126543 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:19:03 crc kubenswrapper[4652]: I1205 06:19:03.125272 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:19:03 crc kubenswrapper[4652]: E1205 06:19:03.126090 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:19:18 crc kubenswrapper[4652]: I1205 06:19:18.131032 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:19:18 crc kubenswrapper[4652]: I1205 06:19:18.334773 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"eeed5dc0adbbb98f8a23bc40901139486222b2b5af4a824f671e55011c1e0ffb"} Dec 05 06:21:34 crc kubenswrapper[4652]: I1205 06:21:34.150901 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:21:34 crc kubenswrapper[4652]: I1205 06:21:34.151640 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:22:04 crc kubenswrapper[4652]: I1205 06:22:04.150985 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:22:04 crc kubenswrapper[4652]: I1205 06:22:04.152726 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:22:34 crc kubenswrapper[4652]: I1205 06:22:34.150230 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:22:34 crc kubenswrapper[4652]: I1205 06:22:34.150756 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:22:34 crc kubenswrapper[4652]: I1205 06:22:34.150807 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 06:22:34 crc kubenswrapper[4652]: I1205 06:22:34.151339 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eeed5dc0adbbb98f8a23bc40901139486222b2b5af4a824f671e55011c1e0ffb"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:22:34 crc kubenswrapper[4652]: I1205 06:22:34.151401 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://eeed5dc0adbbb98f8a23bc40901139486222b2b5af4a824f671e55011c1e0ffb" gracePeriod=600 Dec 05 06:22:35 crc kubenswrapper[4652]: I1205 06:22:35.120990 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="eeed5dc0adbbb98f8a23bc40901139486222b2b5af4a824f671e55011c1e0ffb" exitCode=0 Dec 05 06:22:35 crc kubenswrapper[4652]: I1205 06:22:35.121083 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"eeed5dc0adbbb98f8a23bc40901139486222b2b5af4a824f671e55011c1e0ffb"} Dec 05 06:22:35 crc kubenswrapper[4652]: I1205 06:22:35.121667 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474"} Dec 05 06:22:35 crc kubenswrapper[4652]: I1205 06:22:35.121699 4652 scope.go:117] "RemoveContainer" containerID="91f302083d68caba8f0a100e4ea9552d9308d5856983ff9a7d2a812aa71c2a79" Dec 05 06:24:34 crc kubenswrapper[4652]: I1205 06:24:34.150140 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:24:34 crc kubenswrapper[4652]: I1205 06:24:34.150619 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:25:04 crc kubenswrapper[4652]: I1205 06:25:04.150896 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:25:04 crc kubenswrapper[4652]: I1205 06:25:04.151265 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.150267 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.150650 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.150687 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.151058 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.151108 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" gracePeriod=600 Dec 05 06:25:34 crc kubenswrapper[4652]: E1205 06:25:34.186956 4652 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0331197d_08f0_4dec_8d8a_72e6019bd2eb.slice/crio-1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474.scope\": RecentStats: unable to find data in memory cache]" Dec 05 06:25:34 crc kubenswrapper[4652]: E1205 06:25:34.272669 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.577104 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" exitCode=0 Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.577178 4652 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474"} Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.577360 4652 scope.go:117] "RemoveContainer" containerID="eeed5dc0adbbb98f8a23bc40901139486222b2b5af4a824f671e55011c1e0ffb" Dec 05 06:25:34 crc kubenswrapper[4652]: I1205 06:25:34.577746 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:25:34 crc kubenswrapper[4652]: E1205 06:25:34.577968 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:25:49 crc kubenswrapper[4652]: I1205 06:25:49.126295 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:25:49 crc kubenswrapper[4652]: E1205 06:25:49.127026 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:26:01 crc kubenswrapper[4652]: I1205 06:26:01.125380 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:26:01 crc kubenswrapper[4652]: E1205 06:26:01.126059 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:26:16 crc kubenswrapper[4652]: I1205 06:26:16.125601 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:26:16 crc kubenswrapper[4652]: E1205 06:26:16.127610 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:26:30 crc kubenswrapper[4652]: I1205 06:26:30.125835 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:26:30 crc kubenswrapper[4652]: E1205 06:26:30.126820 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:26:41 crc kubenswrapper[4652]: I1205 06:26:41.125671 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:26:41 crc kubenswrapper[4652]: E1205 06:26:41.126571 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:26:55 crc kubenswrapper[4652]: I1205 06:26:55.125365 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:26:55 crc kubenswrapper[4652]: E1205 06:26:55.125911 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:27:10 crc kubenswrapper[4652]: I1205 06:27:10.125754 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:27:10 crc kubenswrapper[4652]: E1205 06:27:10.126508 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.619705 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5v5jp"] Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620402 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7cddbe2-3da7-451a-a775-551708dabb54" containerName="extract-utilities" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620414 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7cddbe2-3da7-451a-a775-551708dabb54" containerName="extract-utilities" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620427 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620433 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620448 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7cddbe2-3da7-451a-a775-551708dabb54" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620454 4652 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b7cddbe2-3da7-451a-a775-551708dabb54" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620464 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerName="extract-content" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620469 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerName="extract-content" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620478 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerName="extract-content" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620485 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerName="extract-content" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620495 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620502 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620518 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerName="extract-content" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620524 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerName="extract-content" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620533 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620539 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620570 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerName="extract-utilities" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620576 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerName="extract-utilities" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620583 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerName="extract-utilities" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620589 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerName="extract-utilities" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620596 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerName="extract-utilities" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620601 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerName="extract-utilities" Dec 05 06:27:19 crc kubenswrapper[4652]: E1205 06:27:19.620611 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7cddbe2-3da7-451a-a775-551708dabb54" containerName="extract-content" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620616 4652 
state_mem.go:107] "Deleted CPUSet assignment" podUID="b7cddbe2-3da7-451a-a775-551708dabb54" containerName="extract-content" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620770 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3410cec-3dfd-4a08-8ad4-5fcbe5fdf562" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620791 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7cddbe2-3da7-451a-a775-551708dabb54" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620798 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd737f0c-26b5-4182-b46f-04f6d2b804d5" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.620811 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="0216f080-70d5-4847-bea5-b9ed893d3cd1" containerName="registry-server" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.622005 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.627910 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5v5jp"] Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.721611 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q6k4\" (UniqueName: \"kubernetes.io/projected/dc88204d-453d-491e-aae3-9e8ccf33a6d2-kube-api-access-7q6k4\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.721831 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-catalog-content\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.721985 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-utilities\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.822802 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-utilities\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.822951 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q6k4\" (UniqueName: \"kubernetes.io/projected/dc88204d-453d-491e-aae3-9e8ccf33a6d2-kube-api-access-7q6k4\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.822978 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-catalog-content\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.823287 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-utilities\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.823337 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-catalog-content\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.839371 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q6k4\" (UniqueName: \"kubernetes.io/projected/dc88204d-453d-491e-aae3-9e8ccf33a6d2-kube-api-access-7q6k4\") pod \"redhat-marketplace-5v5jp\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:19 crc kubenswrapper[4652]: I1205 06:27:19.937861 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:20 crc kubenswrapper[4652]: I1205 06:27:20.347317 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5v5jp"] Dec 05 06:27:20 crc kubenswrapper[4652]: I1205 06:27:20.350378 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5v5jp" event={"ID":"dc88204d-453d-491e-aae3-9e8ccf33a6d2","Type":"ContainerStarted","Data":"9c9c15dce054d21a0bd62337695c913ebb06f851dd8003326a7a520d111ff9ae"} Dec 05 06:27:21 crc kubenswrapper[4652]: I1205 06:27:21.359536 4652 generic.go:334] "Generic (PLEG): container finished" podID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerID="52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6" exitCode=0 Dec 05 06:27:21 crc kubenswrapper[4652]: I1205 06:27:21.359591 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5v5jp" event={"ID":"dc88204d-453d-491e-aae3-9e8ccf33a6d2","Type":"ContainerDied","Data":"52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6"} Dec 05 06:27:21 crc kubenswrapper[4652]: I1205 06:27:21.361408 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:27:22 crc kubenswrapper[4652]: I1205 06:27:22.367841 4652 generic.go:334] "Generic (PLEG): container finished" podID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerID="3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3" exitCode=0 Dec 05 06:27:22 crc kubenswrapper[4652]: I1205 06:27:22.367900 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5v5jp" event={"ID":"dc88204d-453d-491e-aae3-9e8ccf33a6d2","Type":"ContainerDied","Data":"3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3"} Dec 05 06:27:23 crc kubenswrapper[4652]: I1205 06:27:23.377442 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5v5jp" 
event={"ID":"dc88204d-453d-491e-aae3-9e8ccf33a6d2","Type":"ContainerStarted","Data":"d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8"} Dec 05 06:27:23 crc kubenswrapper[4652]: I1205 06:27:23.397133 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5v5jp" podStartSLOduration=2.903973506 podStartE2EDuration="4.397118359s" podCreationTimestamp="2025-12-05 06:27:19 +0000 UTC" firstStartedPulling="2025-12-05 06:27:21.361178949 +0000 UTC m=+3643.597909216" lastFinishedPulling="2025-12-05 06:27:22.854323802 +0000 UTC m=+3645.091054069" observedRunningTime="2025-12-05 06:27:23.391227314 +0000 UTC m=+3645.627957582" watchObservedRunningTime="2025-12-05 06:27:23.397118359 +0000 UTC m=+3645.633848626" Dec 05 06:27:24 crc kubenswrapper[4652]: I1205 06:27:24.125662 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:27:24 crc kubenswrapper[4652]: E1205 06:27:24.126060 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:27:29 crc kubenswrapper[4652]: I1205 06:27:29.938082 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:29 crc kubenswrapper[4652]: I1205 06:27:29.938627 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:29 crc kubenswrapper[4652]: I1205 06:27:29.971048 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:30 crc kubenswrapper[4652]: I1205 06:27:30.474955 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:30 crc kubenswrapper[4652]: I1205 06:27:30.511369 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5v5jp"] Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.457013 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5v5jp" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerName="registry-server" containerID="cri-o://d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8" gracePeriod=2 Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.831989 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.853457 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q6k4\" (UniqueName: \"kubernetes.io/projected/dc88204d-453d-491e-aae3-9e8ccf33a6d2-kube-api-access-7q6k4\") pod \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.853678 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-utilities\") pod \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.853715 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-catalog-content\") pod \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\" (UID: \"dc88204d-453d-491e-aae3-9e8ccf33a6d2\") " Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.854379 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-utilities" (OuterVolumeSpecName: "utilities") pod "dc88204d-453d-491e-aae3-9e8ccf33a6d2" (UID: "dc88204d-453d-491e-aae3-9e8ccf33a6d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.861873 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc88204d-453d-491e-aae3-9e8ccf33a6d2-kube-api-access-7q6k4" (OuterVolumeSpecName: "kube-api-access-7q6k4") pod "dc88204d-453d-491e-aae3-9e8ccf33a6d2" (UID: "dc88204d-453d-491e-aae3-9e8ccf33a6d2"). InnerVolumeSpecName "kube-api-access-7q6k4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.868707 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc88204d-453d-491e-aae3-9e8ccf33a6d2" (UID: "dc88204d-453d-491e-aae3-9e8ccf33a6d2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.955763 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q6k4\" (UniqueName: \"kubernetes.io/projected/dc88204d-453d-491e-aae3-9e8ccf33a6d2-kube-api-access-7q6k4\") on node \"crc\" DevicePath \"\"" Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.955792 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:27:32 crc kubenswrapper[4652]: I1205 06:27:32.955806 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc88204d-453d-491e-aae3-9e8ccf33a6d2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.465029 4652 generic.go:334] "Generic (PLEG): container finished" podID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerID="d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8" exitCode=0 Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.465067 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5v5jp" event={"ID":"dc88204d-453d-491e-aae3-9e8ccf33a6d2","Type":"ContainerDied","Data":"d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8"} Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.465092 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5v5jp" event={"ID":"dc88204d-453d-491e-aae3-9e8ccf33a6d2","Type":"ContainerDied","Data":"9c9c15dce054d21a0bd62337695c913ebb06f851dd8003326a7a520d111ff9ae"} Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.465119 4652 scope.go:117] "RemoveContainer" containerID="d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.465154 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5v5jp" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.483278 4652 scope.go:117] "RemoveContainer" containerID="3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.495371 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5v5jp"] Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.501783 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5v5jp"] Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.519258 4652 scope.go:117] "RemoveContainer" containerID="52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.546076 4652 scope.go:117] "RemoveContainer" containerID="d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8" Dec 05 06:27:33 crc kubenswrapper[4652]: E1205 06:27:33.546516 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8\": container with ID starting with d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8 not found: ID does not exist" containerID="d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.546572 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8"} err="failed to get container status \"d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8\": rpc error: code = NotFound desc = could not find container \"d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8\": container with ID starting with d65271894c6bb5760a1e83a360b25a720237a0042f651c8856f25a85ea1b8ab8 not found: ID does not exist" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.546598 4652 scope.go:117] "RemoveContainer" containerID="3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3" Dec 05 06:27:33 crc kubenswrapper[4652]: E1205 06:27:33.546944 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3\": container with ID starting with 3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3 not found: ID does not exist" containerID="3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.546975 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3"} err="failed to get container status \"3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3\": rpc error: code = NotFound desc = could not find container \"3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3\": container with ID starting with 3a0aeb7c7f558c9d682da498ac7d9b854ae71af0a93dc2cd7cba78208a3000e3 not found: ID does not exist" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.547001 4652 scope.go:117] "RemoveContainer" containerID="52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6" Dec 05 06:27:33 crc kubenswrapper[4652]: E1205 06:27:33.547603 4652 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6\": container with ID starting with 52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6 not found: ID does not exist" containerID="52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6" Dec 05 06:27:33 crc kubenswrapper[4652]: I1205 06:27:33.547635 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6"} err="failed to get container status \"52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6\": rpc error: code = NotFound desc = could not find container \"52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6\": container with ID starting with 52a8a344738116b5772708ba9e4087033980dd417a93e71667e93db5a60704d6 not found: ID does not exist" Dec 05 06:27:34 crc kubenswrapper[4652]: I1205 06:27:34.134079 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" path="/var/lib/kubelet/pods/dc88204d-453d-491e-aae3-9e8ccf33a6d2/volumes" Dec 05 06:27:35 crc kubenswrapper[4652]: I1205 06:27:35.126971 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:27:35 crc kubenswrapper[4652]: E1205 06:27:35.127499 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:27:49 crc kubenswrapper[4652]: I1205 06:27:49.126043 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:27:49 crc kubenswrapper[4652]: E1205 06:27:49.127895 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:28:01 crc kubenswrapper[4652]: I1205 06:28:01.126158 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:28:01 crc kubenswrapper[4652]: E1205 06:28:01.126983 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:28:02 crc kubenswrapper[4652]: I1205 06:28:02.922230 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6zq2g"] Dec 05 06:28:02 crc kubenswrapper[4652]: E1205 06:28:02.922820 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" 
containerName="extract-content" Dec 05 06:28:02 crc kubenswrapper[4652]: I1205 06:28:02.922835 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerName="extract-content" Dec 05 06:28:02 crc kubenswrapper[4652]: E1205 06:28:02.922857 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerName="registry-server" Dec 05 06:28:02 crc kubenswrapper[4652]: I1205 06:28:02.922862 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerName="registry-server" Dec 05 06:28:02 crc kubenswrapper[4652]: E1205 06:28:02.922882 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerName="extract-utilities" Dec 05 06:28:02 crc kubenswrapper[4652]: I1205 06:28:02.922888 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerName="extract-utilities" Dec 05 06:28:02 crc kubenswrapper[4652]: I1205 06:28:02.923082 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc88204d-453d-491e-aae3-9e8ccf33a6d2" containerName="registry-server" Dec 05 06:28:02 crc kubenswrapper[4652]: I1205 06:28:02.924385 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:02 crc kubenswrapper[4652]: I1205 06:28:02.934683 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6zq2g"] Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.073953 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-catalog-content\") pod \"certified-operators-6zq2g\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.074734 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-utilities\") pod \"certified-operators-6zq2g\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.074841 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-596mz\" (UniqueName: \"kubernetes.io/projected/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-kube-api-access-596mz\") pod \"certified-operators-6zq2g\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.177483 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-catalog-content\") pod \"certified-operators-6zq2g\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.177577 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-utilities\") pod \"certified-operators-6zq2g\" (UID: 
\"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.177628 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-596mz\" (UniqueName: \"kubernetes.io/projected/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-kube-api-access-596mz\") pod \"certified-operators-6zq2g\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.177874 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-catalog-content\") pod \"certified-operators-6zq2g\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.177946 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-utilities\") pod \"certified-operators-6zq2g\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.193711 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-596mz\" (UniqueName: \"kubernetes.io/projected/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-kube-api-access-596mz\") pod \"certified-operators-6zq2g\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.247517 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:03 crc kubenswrapper[4652]: I1205 06:28:03.676047 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6zq2g"] Dec 05 06:28:04 crc kubenswrapper[4652]: I1205 06:28:04.681349 4652 generic.go:334] "Generic (PLEG): container finished" podID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerID="418424b57f8a767006d0d5559f24c8c0c314c7e09b1355cdc5fb8628ee1ad6b4" exitCode=0 Dec 05 06:28:04 crc kubenswrapper[4652]: I1205 06:28:04.681387 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zq2g" event={"ID":"79d4a1ac-55f1-45e2-ac41-54371d1f2eec","Type":"ContainerDied","Data":"418424b57f8a767006d0d5559f24c8c0c314c7e09b1355cdc5fb8628ee1ad6b4"} Dec 05 06:28:04 crc kubenswrapper[4652]: I1205 06:28:04.681584 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zq2g" event={"ID":"79d4a1ac-55f1-45e2-ac41-54371d1f2eec","Type":"ContainerStarted","Data":"43df5f55556a6c1213da1398b64e54687ce46b0e4988ec9b9f8ff68390d889a8"} Dec 05 06:28:05 crc kubenswrapper[4652]: I1205 06:28:05.691053 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zq2g" event={"ID":"79d4a1ac-55f1-45e2-ac41-54371d1f2eec","Type":"ContainerStarted","Data":"17854a33f9df347ee0b75d55bd0b848462630b1c6f0ba621dad1cf7f7508de2d"} Dec 05 06:28:06 crc kubenswrapper[4652]: I1205 06:28:06.699937 4652 generic.go:334] "Generic (PLEG): container finished" podID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerID="17854a33f9df347ee0b75d55bd0b848462630b1c6f0ba621dad1cf7f7508de2d" exitCode=0 Dec 05 06:28:06 crc kubenswrapper[4652]: I1205 06:28:06.699976 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zq2g" event={"ID":"79d4a1ac-55f1-45e2-ac41-54371d1f2eec","Type":"ContainerDied","Data":"17854a33f9df347ee0b75d55bd0b848462630b1c6f0ba621dad1cf7f7508de2d"} Dec 05 06:28:07 crc kubenswrapper[4652]: I1205 06:28:07.709129 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zq2g" event={"ID":"79d4a1ac-55f1-45e2-ac41-54371d1f2eec","Type":"ContainerStarted","Data":"238a4da2361b2a046854b9a8588b1aa04b76410251e214ac5bafa2e67a4322f9"} Dec 05 06:28:07 crc kubenswrapper[4652]: I1205 06:28:07.723945 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6zq2g" podStartSLOduration=3.172861871 podStartE2EDuration="5.723927183s" podCreationTimestamp="2025-12-05 06:28:02 +0000 UTC" firstStartedPulling="2025-12-05 06:28:04.684249552 +0000 UTC m=+3686.920979819" lastFinishedPulling="2025-12-05 06:28:07.235314864 +0000 UTC m=+3689.472045131" observedRunningTime="2025-12-05 06:28:07.721193083 +0000 UTC m=+3689.957923361" watchObservedRunningTime="2025-12-05 06:28:07.723927183 +0000 UTC m=+3689.960657451" Dec 05 06:28:12 crc kubenswrapper[4652]: I1205 06:28:12.890622 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zw85h"] Dec 05 06:28:12 crc kubenswrapper[4652]: I1205 06:28:12.893287 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:12 crc kubenswrapper[4652]: I1205 06:28:12.905865 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zw85h"] Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.001114 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-catalog-content\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.001249 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-utilities\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.001294 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n4wx\" (UniqueName: \"kubernetes.io/projected/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-kube-api-access-7n4wx\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.103201 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-catalog-content\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.103280 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-utilities\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.103311 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n4wx\" (UniqueName: \"kubernetes.io/projected/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-kube-api-access-7n4wx\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.103888 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-utilities\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.103910 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-catalog-content\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.120588 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7n4wx\" (UniqueName: \"kubernetes.io/projected/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-kube-api-access-7n4wx\") pod \"redhat-operators-zw85h\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.125418 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:28:13 crc kubenswrapper[4652]: E1205 06:28:13.125675 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.219798 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.248547 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.248616 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.291761 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.657155 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zw85h"] Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.774646 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zw85h" event={"ID":"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f","Type":"ContainerStarted","Data":"c20a0eb58d31deb283efcfb298c639106e3fafe5ec4a2dc81f769217db75d774"} Dec 05 06:28:13 crc kubenswrapper[4652]: I1205 06:28:13.830627 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:14 crc kubenswrapper[4652]: I1205 06:28:14.784583 4652 generic.go:334] "Generic (PLEG): container finished" podID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerID="c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46" exitCode=0 Dec 05 06:28:14 crc kubenswrapper[4652]: I1205 06:28:14.784675 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zw85h" event={"ID":"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f","Type":"ContainerDied","Data":"c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46"} Dec 05 06:28:15 crc kubenswrapper[4652]: I1205 06:28:15.670033 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6zq2g"] Dec 05 06:28:15 crc kubenswrapper[4652]: I1205 06:28:15.797409 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zw85h" event={"ID":"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f","Type":"ContainerStarted","Data":"747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30"} Dec 05 06:28:15 crc kubenswrapper[4652]: I1205 06:28:15.797620 4652 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-marketplace/certified-operators-6zq2g" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerName="registry-server" containerID="cri-o://238a4da2361b2a046854b9a8588b1aa04b76410251e214ac5bafa2e67a4322f9" gracePeriod=2 Dec 05 06:28:16 crc kubenswrapper[4652]: I1205 06:28:16.808933 4652 generic.go:334] "Generic (PLEG): container finished" podID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerID="238a4da2361b2a046854b9a8588b1aa04b76410251e214ac5bafa2e67a4322f9" exitCode=0 Dec 05 06:28:16 crc kubenswrapper[4652]: I1205 06:28:16.808991 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zq2g" event={"ID":"79d4a1ac-55f1-45e2-ac41-54371d1f2eec","Type":"ContainerDied","Data":"238a4da2361b2a046854b9a8588b1aa04b76410251e214ac5bafa2e67a4322f9"} Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.197113 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.310412 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-catalog-content\") pod \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.310593 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-596mz\" (UniqueName: \"kubernetes.io/projected/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-kube-api-access-596mz\") pod \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.310657 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-utilities\") pod \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\" (UID: \"79d4a1ac-55f1-45e2-ac41-54371d1f2eec\") " Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.311325 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-utilities" (OuterVolumeSpecName: "utilities") pod "79d4a1ac-55f1-45e2-ac41-54371d1f2eec" (UID: "79d4a1ac-55f1-45e2-ac41-54371d1f2eec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.313375 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.316781 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-kube-api-access-596mz" (OuterVolumeSpecName: "kube-api-access-596mz") pod "79d4a1ac-55f1-45e2-ac41-54371d1f2eec" (UID: "79d4a1ac-55f1-45e2-ac41-54371d1f2eec"). InnerVolumeSpecName "kube-api-access-596mz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.339764 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79d4a1ac-55f1-45e2-ac41-54371d1f2eec" (UID: "79d4a1ac-55f1-45e2-ac41-54371d1f2eec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.415321 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.415355 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-596mz\" (UniqueName: \"kubernetes.io/projected/79d4a1ac-55f1-45e2-ac41-54371d1f2eec-kube-api-access-596mz\") on node \"crc\" DevicePath \"\"" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.820424 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zq2g" event={"ID":"79d4a1ac-55f1-45e2-ac41-54371d1f2eec","Type":"ContainerDied","Data":"43df5f55556a6c1213da1398b64e54687ce46b0e4988ec9b9f8ff68390d889a8"} Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.820485 4652 scope.go:117] "RemoveContainer" containerID="238a4da2361b2a046854b9a8588b1aa04b76410251e214ac5bafa2e67a4322f9" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.820511 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zq2g" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.843381 4652 scope.go:117] "RemoveContainer" containerID="17854a33f9df347ee0b75d55bd0b848462630b1c6f0ba621dad1cf7f7508de2d" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.854375 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6zq2g"] Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.861966 4652 scope.go:117] "RemoveContainer" containerID="418424b57f8a767006d0d5559f24c8c0c314c7e09b1355cdc5fb8628ee1ad6b4" Dec 05 06:28:17 crc kubenswrapper[4652]: I1205 06:28:17.865007 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6zq2g"] Dec 05 06:28:18 crc kubenswrapper[4652]: I1205 06:28:18.136008 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" path="/var/lib/kubelet/pods/79d4a1ac-55f1-45e2-ac41-54371d1f2eec/volumes" Dec 05 06:28:18 crc kubenswrapper[4652]: I1205 06:28:18.830157 4652 generic.go:334] "Generic (PLEG): container finished" podID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerID="747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30" exitCode=0 Dec 05 06:28:18 crc kubenswrapper[4652]: I1205 06:28:18.830223 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zw85h" event={"ID":"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f","Type":"ContainerDied","Data":"747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30"} Dec 05 06:28:19 crc kubenswrapper[4652]: I1205 06:28:19.846009 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zw85h" 
event={"ID":"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f","Type":"ContainerStarted","Data":"12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d"} Dec 05 06:28:19 crc kubenswrapper[4652]: I1205 06:28:19.867968 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zw85h" podStartSLOduration=3.326805625 podStartE2EDuration="7.86795308s" podCreationTimestamp="2025-12-05 06:28:12 +0000 UTC" firstStartedPulling="2025-12-05 06:28:14.786547538 +0000 UTC m=+3697.023277805" lastFinishedPulling="2025-12-05 06:28:19.327694992 +0000 UTC m=+3701.564425260" observedRunningTime="2025-12-05 06:28:19.864669839 +0000 UTC m=+3702.101400106" watchObservedRunningTime="2025-12-05 06:28:19.86795308 +0000 UTC m=+3702.104683347" Dec 05 06:28:23 crc kubenswrapper[4652]: I1205 06:28:23.220733 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:23 crc kubenswrapper[4652]: I1205 06:28:23.221189 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:24 crc kubenswrapper[4652]: I1205 06:28:24.256652 4652 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zw85h" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="registry-server" probeResult="failure" output=< Dec 05 06:28:24 crc kubenswrapper[4652]: timeout: failed to connect service ":50051" within 1s Dec 05 06:28:24 crc kubenswrapper[4652]: > Dec 05 06:28:25 crc kubenswrapper[4652]: I1205 06:28:25.125377 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:28:25 crc kubenswrapper[4652]: E1205 06:28:25.125697 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:28:33 crc kubenswrapper[4652]: I1205 06:28:33.255392 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:33 crc kubenswrapper[4652]: I1205 06:28:33.291228 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:34 crc kubenswrapper[4652]: I1205 06:28:34.134158 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zw85h"] Dec 05 06:28:34 crc kubenswrapper[4652]: I1205 06:28:34.964724 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zw85h" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="registry-server" containerID="cri-o://12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d" gracePeriod=2 Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.348210 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.448898 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-utilities\") pod \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.449135 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-catalog-content\") pod \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.449294 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7n4wx\" (UniqueName: \"kubernetes.io/projected/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-kube-api-access-7n4wx\") pod \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\" (UID: \"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f\") " Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.454715 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-kube-api-access-7n4wx" (OuterVolumeSpecName: "kube-api-access-7n4wx") pod "ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" (UID: "ea0eaf05-f950-4d20-96f6-ca8a79dddb9f"). InnerVolumeSpecName "kube-api-access-7n4wx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.472584 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-utilities" (OuterVolumeSpecName: "utilities") pod "ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" (UID: "ea0eaf05-f950-4d20-96f6-ca8a79dddb9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.549197 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" (UID: "ea0eaf05-f950-4d20-96f6-ca8a79dddb9f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.552799 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7n4wx\" (UniqueName: \"kubernetes.io/projected/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-kube-api-access-7n4wx\") on node \"crc\" DevicePath \"\"" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.552830 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.552840 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.974137 4652 generic.go:334] "Generic (PLEG): container finished" podID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerID="12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d" exitCode=0 Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.974179 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zw85h" event={"ID":"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f","Type":"ContainerDied","Data":"12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d"} Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.974286 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zw85h" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.974327 4652 scope.go:117] "RemoveContainer" containerID="12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d" Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.974508 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zw85h" event={"ID":"ea0eaf05-f950-4d20-96f6-ca8a79dddb9f","Type":"ContainerDied","Data":"c20a0eb58d31deb283efcfb298c639106e3fafe5ec4a2dc81f769217db75d774"} Dec 05 06:28:35 crc kubenswrapper[4652]: I1205 06:28:35.991614 4652 scope.go:117] "RemoveContainer" containerID="747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.001123 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zw85h"] Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.008254 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zw85h"] Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.026036 4652 scope.go:117] "RemoveContainer" containerID="c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.046186 4652 scope.go:117] "RemoveContainer" containerID="12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d" Dec 05 06:28:36 crc kubenswrapper[4652]: E1205 06:28:36.046592 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d\": container with ID starting with 12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d not found: ID does not exist" containerID="12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.046679 4652 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d"} err="failed to get container status \"12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d\": rpc error: code = NotFound desc = could not find container \"12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d\": container with ID starting with 12b1d0c362b371372ddf74cdc15976f7be329996afffc96bac5de0849410e19d not found: ID does not exist" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.046771 4652 scope.go:117] "RemoveContainer" containerID="747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30" Dec 05 06:28:36 crc kubenswrapper[4652]: E1205 06:28:36.047095 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30\": container with ID starting with 747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30 not found: ID does not exist" containerID="747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.047175 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30"} err="failed to get container status \"747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30\": rpc error: code = NotFound desc = could not find container \"747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30\": container with ID starting with 747e1bfd3b14b3072eba06984a3414d15b5025947df727e9f14c2dbbdffaef30 not found: ID does not exist" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.047271 4652 scope.go:117] "RemoveContainer" containerID="c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46" Dec 05 06:28:36 crc kubenswrapper[4652]: E1205 06:28:36.047590 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46\": container with ID starting with c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46 not found: ID does not exist" containerID="c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.047678 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46"} err="failed to get container status \"c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46\": rpc error: code = NotFound desc = could not find container \"c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46\": container with ID starting with c7f5327494f30a64317ea0f957272b5073c99fb32cfc82484dfcecc1ce3dbd46 not found: ID does not exist" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.125964 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:28:36 crc kubenswrapper[4652]: E1205 06:28:36.126334 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:28:36 crc kubenswrapper[4652]: I1205 06:28:36.136064 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" path="/var/lib/kubelet/pods/ea0eaf05-f950-4d20-96f6-ca8a79dddb9f/volumes" Dec 05 06:28:49 crc kubenswrapper[4652]: I1205 06:28:49.125545 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:28:49 crc kubenswrapper[4652]: E1205 06:28:49.126209 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:29:04 crc kubenswrapper[4652]: I1205 06:29:04.125648 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:29:04 crc kubenswrapper[4652]: E1205 06:29:04.126909 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:29:17 crc kubenswrapper[4652]: I1205 06:29:17.125912 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:29:17 crc kubenswrapper[4652]: E1205 06:29:17.126672 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:29:30 crc kubenswrapper[4652]: I1205 06:29:30.125695 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:29:30 crc kubenswrapper[4652]: E1205 06:29:30.126351 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:29:42 crc kubenswrapper[4652]: I1205 06:29:42.125847 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:29:42 crc kubenswrapper[4652]: E1205 06:29:42.126633 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:29:53 crc kubenswrapper[4652]: I1205 06:29:53.126264 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:29:53 crc kubenswrapper[4652]: E1205 06:29:53.126931 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.163652 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld"] Dec 05 06:30:00 crc kubenswrapper[4652]: E1205 06:30:00.164654 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.164670 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4652]: E1205 06:30:00.164682 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.164688 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4652]: E1205 06:30:00.164695 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.164701 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4652]: E1205 06:30:00.164714 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.164720 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4652]: E1205 06:30:00.164737 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.164742 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4652]: E1205 06:30:00.164759 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.164764 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerName="extract-utilities" Dec 05 06:30:00 crc 
kubenswrapper[4652]: I1205 06:30:00.165017 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="79d4a1ac-55f1-45e2-ac41-54371d1f2eec" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.165037 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea0eaf05-f950-4d20-96f6-ca8a79dddb9f" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.165807 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.167832 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.167938 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.175574 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld"] Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.303138 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c9915e20-7c46-4243-b40b-8f4a86760b11-config-volume\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.303218 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c9915e20-7c46-4243-b40b-8f4a86760b11-secret-volume\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.303249 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mdqx\" (UniqueName: \"kubernetes.io/projected/c9915e20-7c46-4243-b40b-8f4a86760b11-kube-api-access-7mdqx\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.404467 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c9915e20-7c46-4243-b40b-8f4a86760b11-config-volume\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.404541 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c9915e20-7c46-4243-b40b-8f4a86760b11-secret-volume\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.404584 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mdqx\" (UniqueName: 
\"kubernetes.io/projected/c9915e20-7c46-4243-b40b-8f4a86760b11-kube-api-access-7mdqx\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.405574 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c9915e20-7c46-4243-b40b-8f4a86760b11-config-volume\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.410880 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c9915e20-7c46-4243-b40b-8f4a86760b11-secret-volume\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.420282 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mdqx\" (UniqueName: \"kubernetes.io/projected/c9915e20-7c46-4243-b40b-8f4a86760b11-kube-api-access-7mdqx\") pod \"collect-profiles-29415270-5fgld\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.493732 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:00 crc kubenswrapper[4652]: I1205 06:30:00.945339 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld"] Dec 05 06:30:01 crc kubenswrapper[4652]: I1205 06:30:01.630304 4652 generic.go:334] "Generic (PLEG): container finished" podID="c9915e20-7c46-4243-b40b-8f4a86760b11" containerID="c56e1ef8b86012688d9a6f22bb5efe1a0519f3784f6b3ee0c80376b530cad121" exitCode=0 Dec 05 06:30:01 crc kubenswrapper[4652]: I1205 06:30:01.630372 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" event={"ID":"c9915e20-7c46-4243-b40b-8f4a86760b11","Type":"ContainerDied","Data":"c56e1ef8b86012688d9a6f22bb5efe1a0519f3784f6b3ee0c80376b530cad121"} Dec 05 06:30:01 crc kubenswrapper[4652]: I1205 06:30:01.631595 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" event={"ID":"c9915e20-7c46-4243-b40b-8f4a86760b11","Type":"ContainerStarted","Data":"272cee7e4c9c841b3a7a79ab1f6fb87642f6f5a8fecf8d1afee598c694c3936d"} Dec 05 06:30:02 crc kubenswrapper[4652]: I1205 06:30:02.944517 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.085796 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c9915e20-7c46-4243-b40b-8f4a86760b11-secret-volume\") pod \"c9915e20-7c46-4243-b40b-8f4a86760b11\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.086158 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c9915e20-7c46-4243-b40b-8f4a86760b11-config-volume\") pod \"c9915e20-7c46-4243-b40b-8f4a86760b11\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.086332 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mdqx\" (UniqueName: \"kubernetes.io/projected/c9915e20-7c46-4243-b40b-8f4a86760b11-kube-api-access-7mdqx\") pod \"c9915e20-7c46-4243-b40b-8f4a86760b11\" (UID: \"c9915e20-7c46-4243-b40b-8f4a86760b11\") " Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.088144 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9915e20-7c46-4243-b40b-8f4a86760b11-config-volume" (OuterVolumeSpecName: "config-volume") pod "c9915e20-7c46-4243-b40b-8f4a86760b11" (UID: "c9915e20-7c46-4243-b40b-8f4a86760b11"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.092922 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9915e20-7c46-4243-b40b-8f4a86760b11-kube-api-access-7mdqx" (OuterVolumeSpecName: "kube-api-access-7mdqx") pod "c9915e20-7c46-4243-b40b-8f4a86760b11" (UID: "c9915e20-7c46-4243-b40b-8f4a86760b11"). InnerVolumeSpecName "kube-api-access-7mdqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.093452 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9915e20-7c46-4243-b40b-8f4a86760b11-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c9915e20-7c46-4243-b40b-8f4a86760b11" (UID: "c9915e20-7c46-4243-b40b-8f4a86760b11"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.189040 4652 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c9915e20-7c46-4243-b40b-8f4a86760b11-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.189885 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mdqx\" (UniqueName: \"kubernetes.io/projected/c9915e20-7c46-4243-b40b-8f4a86760b11-kube-api-access-7mdqx\") on node \"crc\" DevicePath \"\"" Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.189954 4652 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c9915e20-7c46-4243-b40b-8f4a86760b11-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.650537 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" event={"ID":"c9915e20-7c46-4243-b40b-8f4a86760b11","Type":"ContainerDied","Data":"272cee7e4c9c841b3a7a79ab1f6fb87642f6f5a8fecf8d1afee598c694c3936d"} Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.650600 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="272cee7e4c9c841b3a7a79ab1f6fb87642f6f5a8fecf8d1afee598c694c3936d" Dec 05 06:30:03 crc kubenswrapper[4652]: I1205 06:30:03.650644 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-5fgld" Dec 05 06:30:04 crc kubenswrapper[4652]: I1205 06:30:04.016112 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94"] Dec 05 06:30:04 crc kubenswrapper[4652]: I1205 06:30:04.025075 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-k4b94"] Dec 05 06:30:04 crc kubenswrapper[4652]: I1205 06:30:04.137800 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99af3ed8-4da1-4029-8876-20a311b1762c" path="/var/lib/kubelet/pods/99af3ed8-4da1-4029-8876-20a311b1762c/volumes" Dec 05 06:30:07 crc kubenswrapper[4652]: I1205 06:30:07.125986 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:30:07 crc kubenswrapper[4652]: E1205 06:30:07.126602 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:30:20 crc kubenswrapper[4652]: I1205 06:30:20.125400 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:30:20 crc kubenswrapper[4652]: E1205 06:30:20.126345 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:30:33 crc kubenswrapper[4652]: I1205 06:30:33.126010 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:30:33 crc kubenswrapper[4652]: E1205 06:30:33.126754 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:30:41 crc kubenswrapper[4652]: I1205 06:30:41.906235 4652 scope.go:117] "RemoveContainer" containerID="74d8266a7df36f6402a9882502e17b2a2ee6ce84da1486bd90de96e5b94c14e3" Dec 05 06:30:48 crc kubenswrapper[4652]: I1205 06:30:48.133001 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:30:49 crc kubenswrapper[4652]: I1205 06:30:49.010222 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"b0ff29e206b2a91943bc018f74a1cd14ffdc46ab63c50c561dc11f726f47a505"} Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.154956 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mn25r"] Dec 05 06:32:00 crc kubenswrapper[4652]: E1205 06:32:00.156408 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9915e20-7c46-4243-b40b-8f4a86760b11" containerName="collect-profiles" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.156424 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9915e20-7c46-4243-b40b-8f4a86760b11" containerName="collect-profiles" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.156719 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9915e20-7c46-4243-b40b-8f4a86760b11" containerName="collect-profiles" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.158285 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.164706 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mn25r"] Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.290577 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-catalog-content\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.290814 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-utilities\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.290928 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st9x2\" (UniqueName: \"kubernetes.io/projected/f142331e-4024-481d-85f7-6acd795848fe-kube-api-access-st9x2\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.393509 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-utilities\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.393638 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st9x2\" (UniqueName: \"kubernetes.io/projected/f142331e-4024-481d-85f7-6acd795848fe-kube-api-access-st9x2\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.393928 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-catalog-content\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.394086 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-utilities\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.394435 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-catalog-content\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.425127 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-st9x2\" (UniqueName: \"kubernetes.io/projected/f142331e-4024-481d-85f7-6acd795848fe-kube-api-access-st9x2\") pod \"community-operators-mn25r\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.481437 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:00 crc kubenswrapper[4652]: I1205 06:32:00.969525 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mn25r"] Dec 05 06:32:01 crc kubenswrapper[4652]: I1205 06:32:01.627369 4652 generic.go:334] "Generic (PLEG): container finished" podID="f142331e-4024-481d-85f7-6acd795848fe" containerID="bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6" exitCode=0 Dec 05 06:32:01 crc kubenswrapper[4652]: I1205 06:32:01.627423 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mn25r" event={"ID":"f142331e-4024-481d-85f7-6acd795848fe","Type":"ContainerDied","Data":"bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6"} Dec 05 06:32:01 crc kubenswrapper[4652]: I1205 06:32:01.627450 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mn25r" event={"ID":"f142331e-4024-481d-85f7-6acd795848fe","Type":"ContainerStarted","Data":"78f3d546febe7bdfd542414fc6f73df548d96afc5de60588a1ac51740a422848"} Dec 05 06:32:02 crc kubenswrapper[4652]: I1205 06:32:02.639724 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mn25r" event={"ID":"f142331e-4024-481d-85f7-6acd795848fe","Type":"ContainerStarted","Data":"5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0"} Dec 05 06:32:03 crc kubenswrapper[4652]: I1205 06:32:03.655029 4652 generic.go:334] "Generic (PLEG): container finished" podID="f142331e-4024-481d-85f7-6acd795848fe" containerID="5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0" exitCode=0 Dec 05 06:32:03 crc kubenswrapper[4652]: I1205 06:32:03.655117 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mn25r" event={"ID":"f142331e-4024-481d-85f7-6acd795848fe","Type":"ContainerDied","Data":"5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0"} Dec 05 06:32:04 crc kubenswrapper[4652]: I1205 06:32:04.669308 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mn25r" event={"ID":"f142331e-4024-481d-85f7-6acd795848fe","Type":"ContainerStarted","Data":"c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533"} Dec 05 06:32:04 crc kubenswrapper[4652]: I1205 06:32:04.684996 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mn25r" podStartSLOduration=2.197722211 podStartE2EDuration="4.684977003s" podCreationTimestamp="2025-12-05 06:32:00 +0000 UTC" firstStartedPulling="2025-12-05 06:32:01.628647519 +0000 UTC m=+3923.865377785" lastFinishedPulling="2025-12-05 06:32:04.115902309 +0000 UTC m=+3926.352632577" observedRunningTime="2025-12-05 06:32:04.682128649 +0000 UTC m=+3926.918858916" watchObservedRunningTime="2025-12-05 06:32:04.684977003 +0000 UTC m=+3926.921707260" Dec 05 06:32:10 crc kubenswrapper[4652]: I1205 06:32:10.482429 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:10 crc kubenswrapper[4652]: I1205 06:32:10.482816 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:10 crc kubenswrapper[4652]: I1205 06:32:10.519893 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:10 crc kubenswrapper[4652]: I1205 06:32:10.744283 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:10 crc kubenswrapper[4652]: I1205 06:32:10.782127 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mn25r"] Dec 05 06:32:12 crc kubenswrapper[4652]: I1205 06:32:12.726052 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mn25r" podUID="f142331e-4024-481d-85f7-6acd795848fe" containerName="registry-server" containerID="cri-o://c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533" gracePeriod=2 Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.481156 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.541683 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-utilities\") pod \"f142331e-4024-481d-85f7-6acd795848fe\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.541795 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-catalog-content\") pod \"f142331e-4024-481d-85f7-6acd795848fe\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.541849 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-st9x2\" (UniqueName: \"kubernetes.io/projected/f142331e-4024-481d-85f7-6acd795848fe-kube-api-access-st9x2\") pod \"f142331e-4024-481d-85f7-6acd795848fe\" (UID: \"f142331e-4024-481d-85f7-6acd795848fe\") " Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.542317 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-utilities" (OuterVolumeSpecName: "utilities") pod "f142331e-4024-481d-85f7-6acd795848fe" (UID: "f142331e-4024-481d-85f7-6acd795848fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.546288 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f142331e-4024-481d-85f7-6acd795848fe-kube-api-access-st9x2" (OuterVolumeSpecName: "kube-api-access-st9x2") pod "f142331e-4024-481d-85f7-6acd795848fe" (UID: "f142331e-4024-481d-85f7-6acd795848fe"). InnerVolumeSpecName "kube-api-access-st9x2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.583968 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f142331e-4024-481d-85f7-6acd795848fe" (UID: "f142331e-4024-481d-85f7-6acd795848fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.643789 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.643815 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-st9x2\" (UniqueName: \"kubernetes.io/projected/f142331e-4024-481d-85f7-6acd795848fe-kube-api-access-st9x2\") on node \"crc\" DevicePath \"\"" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.643826 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f142331e-4024-481d-85f7-6acd795848fe-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.737236 4652 generic.go:334] "Generic (PLEG): container finished" podID="f142331e-4024-481d-85f7-6acd795848fe" containerID="c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533" exitCode=0 Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.737279 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mn25r" event={"ID":"f142331e-4024-481d-85f7-6acd795848fe","Type":"ContainerDied","Data":"c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533"} Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.737305 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mn25r" event={"ID":"f142331e-4024-481d-85f7-6acd795848fe","Type":"ContainerDied","Data":"78f3d546febe7bdfd542414fc6f73df548d96afc5de60588a1ac51740a422848"} Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.737321 4652 scope.go:117] "RemoveContainer" containerID="c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.737338 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mn25r" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.758522 4652 scope.go:117] "RemoveContainer" containerID="5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.768195 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mn25r"] Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.775967 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mn25r"] Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.789811 4652 scope.go:117] "RemoveContainer" containerID="bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.809482 4652 scope.go:117] "RemoveContainer" containerID="c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533" Dec 05 06:32:13 crc kubenswrapper[4652]: E1205 06:32:13.809832 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533\": container with ID starting with c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533 not found: ID does not exist" containerID="c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.809883 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533"} err="failed to get container status \"c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533\": rpc error: code = NotFound desc = could not find container \"c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533\": container with ID starting with c743cc47588f2440fe3fb777dee7d7342adf8df638f4609432ca6d6749784533 not found: ID does not exist" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.809909 4652 scope.go:117] "RemoveContainer" containerID="5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0" Dec 05 06:32:13 crc kubenswrapper[4652]: E1205 06:32:13.810230 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0\": container with ID starting with 5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0 not found: ID does not exist" containerID="5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.810270 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0"} err="failed to get container status \"5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0\": rpc error: code = NotFound desc = could not find container \"5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0\": container with ID starting with 5aebba27e4fd1cfef4161c8605e8ed4266bbea01328b6e6b7a3bd37d79fadab0 not found: ID does not exist" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.810299 4652 scope.go:117] "RemoveContainer" containerID="bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6" Dec 05 06:32:13 crc kubenswrapper[4652]: E1205 06:32:13.810620 4652 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6\": container with ID starting with bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6 not found: ID does not exist" containerID="bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6" Dec 05 06:32:13 crc kubenswrapper[4652]: I1205 06:32:13.810645 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6"} err="failed to get container status \"bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6\": rpc error: code = NotFound desc = could not find container \"bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6\": container with ID starting with bc81891f6a7247059b4ece7926f9628c138023fa9db8fa5d02e38a7927f344a6 not found: ID does not exist" Dec 05 06:32:14 crc kubenswrapper[4652]: I1205 06:32:14.135364 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f142331e-4024-481d-85f7-6acd795848fe" path="/var/lib/kubelet/pods/f142331e-4024-481d-85f7-6acd795848fe/volumes" Dec 05 06:33:04 crc kubenswrapper[4652]: I1205 06:33:04.150419 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:33:04 crc kubenswrapper[4652]: I1205 06:33:04.150760 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:33:34 crc kubenswrapper[4652]: I1205 06:33:34.150190 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:33:34 crc kubenswrapper[4652]: I1205 06:33:34.150567 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:34:04 crc kubenswrapper[4652]: I1205 06:34:04.150827 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:34:04 crc kubenswrapper[4652]: I1205 06:34:04.151243 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:34:04 crc kubenswrapper[4652]: I1205 06:34:04.151276 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 06:34:04 crc kubenswrapper[4652]: I1205 06:34:04.151789 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b0ff29e206b2a91943bc018f74a1cd14ffdc46ab63c50c561dc11f726f47a505"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:34:04 crc kubenswrapper[4652]: I1205 06:34:04.151839 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://b0ff29e206b2a91943bc018f74a1cd14ffdc46ab63c50c561dc11f726f47a505" gracePeriod=600 Dec 05 06:34:04 crc kubenswrapper[4652]: I1205 06:34:04.520906 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="b0ff29e206b2a91943bc018f74a1cd14ffdc46ab63c50c561dc11f726f47a505" exitCode=0 Dec 05 06:34:04 crc kubenswrapper[4652]: I1205 06:34:04.520941 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"b0ff29e206b2a91943bc018f74a1cd14ffdc46ab63c50c561dc11f726f47a505"} Dec 05 06:34:04 crc kubenswrapper[4652]: I1205 06:34:04.520971 4652 scope.go:117] "RemoveContainer" containerID="1fe1ebfaa386c6431bf771a0f0d93db2d131c4a2ae6cc48db3ed27c5fbc1a474" Dec 05 06:34:05 crc kubenswrapper[4652]: I1205 06:34:05.529134 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f"} Dec 05 06:36:04 crc kubenswrapper[4652]: I1205 06:36:04.150225 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:36:04 crc kubenswrapper[4652]: I1205 06:36:04.150877 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:36:34 crc kubenswrapper[4652]: I1205 06:36:34.150184 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:36:34 crc kubenswrapper[4652]: I1205 06:36:34.151251 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:37:04 crc 
kubenswrapper[4652]: I1205 06:37:04.150405 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:37:04 crc kubenswrapper[4652]: I1205 06:37:04.150979 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:37:04 crc kubenswrapper[4652]: I1205 06:37:04.151031 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 06:37:04 crc kubenswrapper[4652]: I1205 06:37:04.151951 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:37:04 crc kubenswrapper[4652]: I1205 06:37:04.152014 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" gracePeriod=600 Dec 05 06:37:04 crc kubenswrapper[4652]: E1205 06:37:04.271651 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:37:04 crc kubenswrapper[4652]: I1205 06:37:04.988908 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" exitCode=0 Dec 05 06:37:04 crc kubenswrapper[4652]: I1205 06:37:04.988952 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f"} Dec 05 06:37:04 crc kubenswrapper[4652]: I1205 06:37:04.988990 4652 scope.go:117] "RemoveContainer" containerID="b0ff29e206b2a91943bc018f74a1cd14ffdc46ab63c50c561dc11f726f47a505" Dec 05 06:37:04 crc kubenswrapper[4652]: I1205 06:37:04.989488 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:37:04 crc kubenswrapper[4652]: E1205 06:37:04.989802 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:37:20 crc kubenswrapper[4652]: I1205 06:37:20.135078 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:37:20 crc kubenswrapper[4652]: E1205 06:37:20.138246 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.599263 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d86bl"] Dec 05 06:37:29 crc kubenswrapper[4652]: E1205 06:37:29.600209 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f142331e-4024-481d-85f7-6acd795848fe" containerName="extract-content" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.600224 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f142331e-4024-481d-85f7-6acd795848fe" containerName="extract-content" Dec 05 06:37:29 crc kubenswrapper[4652]: E1205 06:37:29.600251 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f142331e-4024-481d-85f7-6acd795848fe" containerName="extract-utilities" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.600259 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f142331e-4024-481d-85f7-6acd795848fe" containerName="extract-utilities" Dec 05 06:37:29 crc kubenswrapper[4652]: E1205 06:37:29.600291 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f142331e-4024-481d-85f7-6acd795848fe" containerName="registry-server" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.600297 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="f142331e-4024-481d-85f7-6acd795848fe" containerName="registry-server" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.600767 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="f142331e-4024-481d-85f7-6acd795848fe" containerName="registry-server" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.612182 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.615421 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d86bl"] Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.652460 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2stf4\" (UniqueName: \"kubernetes.io/projected/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-kube-api-access-2stf4\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.652508 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-catalog-content\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.652785 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-utilities\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.755119 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-utilities\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.755389 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2stf4\" (UniqueName: \"kubernetes.io/projected/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-kube-api-access-2stf4\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.755427 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-catalog-content\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.756290 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-catalog-content\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.756379 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-utilities\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.777494 4652 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2stf4\" (UniqueName: \"kubernetes.io/projected/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-kube-api-access-2stf4\") pod \"redhat-marketplace-d86bl\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:29 crc kubenswrapper[4652]: I1205 06:37:29.932377 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:30 crc kubenswrapper[4652]: I1205 06:37:30.623750 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d86bl"] Dec 05 06:37:31 crc kubenswrapper[4652]: I1205 06:37:31.125875 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:37:31 crc kubenswrapper[4652]: E1205 06:37:31.126282 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:37:31 crc kubenswrapper[4652]: I1205 06:37:31.251632 4652 generic.go:334] "Generic (PLEG): container finished" podID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerID="2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450" exitCode=0 Dec 05 06:37:31 crc kubenswrapper[4652]: I1205 06:37:31.251743 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d86bl" event={"ID":"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b","Type":"ContainerDied","Data":"2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450"} Dec 05 06:37:31 crc kubenswrapper[4652]: I1205 06:37:31.251981 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d86bl" event={"ID":"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b","Type":"ContainerStarted","Data":"6cd537a88f16f070bf39f5d0e8761e744fcb7252540e8f6f4c432ec37de50444"} Dec 05 06:37:31 crc kubenswrapper[4652]: I1205 06:37:31.254809 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:37:32 crc kubenswrapper[4652]: I1205 06:37:32.262389 4652 generic.go:334] "Generic (PLEG): container finished" podID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerID="ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730" exitCode=0 Dec 05 06:37:32 crc kubenswrapper[4652]: I1205 06:37:32.262460 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d86bl" event={"ID":"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b","Type":"ContainerDied","Data":"ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730"} Dec 05 06:37:33 crc kubenswrapper[4652]: I1205 06:37:33.273083 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d86bl" event={"ID":"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b","Type":"ContainerStarted","Data":"c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c"} Dec 05 06:37:33 crc kubenswrapper[4652]: I1205 06:37:33.294170 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d86bl" podStartSLOduration=2.698559537 podStartE2EDuration="4.294152037s" 
podCreationTimestamp="2025-12-05 06:37:29 +0000 UTC" firstStartedPulling="2025-12-05 06:37:31.253885496 +0000 UTC m=+4253.490615762" lastFinishedPulling="2025-12-05 06:37:32.849477994 +0000 UTC m=+4255.086208262" observedRunningTime="2025-12-05 06:37:33.286905954 +0000 UTC m=+4255.523636222" watchObservedRunningTime="2025-12-05 06:37:33.294152037 +0000 UTC m=+4255.530882304" Dec 05 06:37:39 crc kubenswrapper[4652]: I1205 06:37:39.932744 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:39 crc kubenswrapper[4652]: I1205 06:37:39.933312 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:39 crc kubenswrapper[4652]: I1205 06:37:39.971334 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:40 crc kubenswrapper[4652]: I1205 06:37:40.381124 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:40 crc kubenswrapper[4652]: I1205 06:37:40.421996 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d86bl"] Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.364473 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d86bl" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerName="registry-server" containerID="cri-o://c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c" gracePeriod=2 Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.759530 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.859151 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2stf4\" (UniqueName: \"kubernetes.io/projected/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-kube-api-access-2stf4\") pod \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.859240 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-catalog-content\") pod \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.859299 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-utilities\") pod \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\" (UID: \"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b\") " Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.860524 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-utilities" (OuterVolumeSpecName: "utilities") pod "1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" (UID: "1dad71b2-e4b1-438b-bc9d-50ed13a9f34b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.865758 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-kube-api-access-2stf4" (OuterVolumeSpecName: "kube-api-access-2stf4") pod "1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" (UID: "1dad71b2-e4b1-438b-bc9d-50ed13a9f34b"). InnerVolumeSpecName "kube-api-access-2stf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.874686 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" (UID: "1dad71b2-e4b1-438b-bc9d-50ed13a9f34b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.962481 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2stf4\" (UniqueName: \"kubernetes.io/projected/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-kube-api-access-2stf4\") on node \"crc\" DevicePath \"\"" Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.962511 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:37:42 crc kubenswrapper[4652]: I1205 06:37:42.962523 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.375907 4652 generic.go:334] "Generic (PLEG): container finished" podID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerID="c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c" exitCode=0 Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.375971 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d86bl" event={"ID":"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b","Type":"ContainerDied","Data":"c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c"} Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.376051 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d86bl" event={"ID":"1dad71b2-e4b1-438b-bc9d-50ed13a9f34b","Type":"ContainerDied","Data":"6cd537a88f16f070bf39f5d0e8761e744fcb7252540e8f6f4c432ec37de50444"} Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.376051 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d86bl" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.376073 4652 scope.go:117] "RemoveContainer" containerID="c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.400001 4652 scope.go:117] "RemoveContainer" containerID="ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.409679 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d86bl"] Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.417385 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d86bl"] Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.440213 4652 scope.go:117] "RemoveContainer" containerID="2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.459686 4652 scope.go:117] "RemoveContainer" containerID="c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c" Dec 05 06:37:43 crc kubenswrapper[4652]: E1205 06:37:43.460364 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c\": container with ID starting with c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c not found: ID does not exist" containerID="c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.460468 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c"} err="failed to get container status \"c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c\": rpc error: code = NotFound desc = could not find container \"c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c\": container with ID starting with c583ba18316c255a92b9454804c4c63c74e6c16c0bc52721b81cd060976e809c not found: ID does not exist" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.460544 4652 scope.go:117] "RemoveContainer" containerID="ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730" Dec 05 06:37:43 crc kubenswrapper[4652]: E1205 06:37:43.461045 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730\": container with ID starting with ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730 not found: ID does not exist" containerID="ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.461122 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730"} err="failed to get container status \"ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730\": rpc error: code = NotFound desc = could not find container \"ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730\": container with ID starting with ed83587d304b0fc5590c8689baf8113211c495f53791e2b49888039b212c7730 not found: ID does not exist" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.461198 4652 scope.go:117] "RemoveContainer" 
containerID="2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450" Dec 05 06:37:43 crc kubenswrapper[4652]: E1205 06:37:43.461541 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450\": container with ID starting with 2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450 not found: ID does not exist" containerID="2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450" Dec 05 06:37:43 crc kubenswrapper[4652]: I1205 06:37:43.461583 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450"} err="failed to get container status \"2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450\": rpc error: code = NotFound desc = could not find container \"2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450\": container with ID starting with 2dc48ce2aa6f3cbb6f0f5750bb23edc691777bb33e9e77224f8adce9c1c64450 not found: ID does not exist" Dec 05 06:37:44 crc kubenswrapper[4652]: I1205 06:37:44.138093 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" path="/var/lib/kubelet/pods/1dad71b2-e4b1-438b-bc9d-50ed13a9f34b/volumes" Dec 05 06:37:45 crc kubenswrapper[4652]: I1205 06:37:45.125315 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:37:45 crc kubenswrapper[4652]: E1205 06:37:45.125545 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:37:59 crc kubenswrapper[4652]: I1205 06:37:59.126630 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:37:59 crc kubenswrapper[4652]: E1205 06:37:59.127923 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:38:12 crc kubenswrapper[4652]: I1205 06:38:12.126029 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:38:12 crc kubenswrapper[4652]: E1205 06:38:12.126975 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:38:25 crc kubenswrapper[4652]: I1205 06:38:25.125670 4652 scope.go:117] "RemoveContainer" 
containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:38:25 crc kubenswrapper[4652]: E1205 06:38:25.126421 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.865720 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q7gcl"] Dec 05 06:38:27 crc kubenswrapper[4652]: E1205 06:38:27.866284 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerName="extract-utilities" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.866297 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerName="extract-utilities" Dec 05 06:38:27 crc kubenswrapper[4652]: E1205 06:38:27.866321 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerName="extract-content" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.866328 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerName="extract-content" Dec 05 06:38:27 crc kubenswrapper[4652]: E1205 06:38:27.866344 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerName="registry-server" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.866350 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerName="registry-server" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.866550 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dad71b2-e4b1-438b-bc9d-50ed13a9f34b" containerName="registry-server" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.867863 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.876178 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q7gcl"] Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.911658 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-utilities\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.911864 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2s4z\" (UniqueName: \"kubernetes.io/projected/8d10ce98-0e0e-4f8d-be46-acc13ec44992-kube-api-access-j2s4z\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:27 crc kubenswrapper[4652]: I1205 06:38:27.911908 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-catalog-content\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.013324 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2s4z\" (UniqueName: \"kubernetes.io/projected/8d10ce98-0e0e-4f8d-be46-acc13ec44992-kube-api-access-j2s4z\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.013390 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-catalog-content\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.013645 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-utilities\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.014053 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-catalog-content\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.014112 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-utilities\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.110357 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-j2s4z\" (UniqueName: \"kubernetes.io/projected/8d10ce98-0e0e-4f8d-be46-acc13ec44992-kube-api-access-j2s4z\") pod \"redhat-operators-q7gcl\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.190911 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.610260 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q7gcl"] Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.806057 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7gcl" event={"ID":"8d10ce98-0e0e-4f8d-be46-acc13ec44992","Type":"ContainerStarted","Data":"1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840"} Dec 05 06:38:28 crc kubenswrapper[4652]: I1205 06:38:28.806100 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7gcl" event={"ID":"8d10ce98-0e0e-4f8d-be46-acc13ec44992","Type":"ContainerStarted","Data":"72bdd721667d4f0cba09d1a8c3e492aa4ab1f9104d9372f29e073ec5afc2b9fd"} Dec 05 06:38:29 crc kubenswrapper[4652]: I1205 06:38:29.829420 4652 generic.go:334] "Generic (PLEG): container finished" podID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerID="1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840" exitCode=0 Dec 05 06:38:29 crc kubenswrapper[4652]: I1205 06:38:29.829465 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7gcl" event={"ID":"8d10ce98-0e0e-4f8d-be46-acc13ec44992","Type":"ContainerDied","Data":"1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840"} Dec 05 06:38:29 crc kubenswrapper[4652]: I1205 06:38:29.829836 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7gcl" event={"ID":"8d10ce98-0e0e-4f8d-be46-acc13ec44992","Type":"ContainerStarted","Data":"f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a"} Dec 05 06:38:31 crc kubenswrapper[4652]: I1205 06:38:31.849159 4652 generic.go:334] "Generic (PLEG): container finished" podID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerID="f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a" exitCode=0 Dec 05 06:38:31 crc kubenswrapper[4652]: I1205 06:38:31.849232 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7gcl" event={"ID":"8d10ce98-0e0e-4f8d-be46-acc13ec44992","Type":"ContainerDied","Data":"f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a"} Dec 05 06:38:32 crc kubenswrapper[4652]: I1205 06:38:32.862461 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7gcl" event={"ID":"8d10ce98-0e0e-4f8d-be46-acc13ec44992","Type":"ContainerStarted","Data":"d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3"} Dec 05 06:38:32 crc kubenswrapper[4652]: I1205 06:38:32.887912 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q7gcl" podStartSLOduration=2.350358928 podStartE2EDuration="5.887889469s" podCreationTimestamp="2025-12-05 06:38:27 +0000 UTC" firstStartedPulling="2025-12-05 06:38:28.807635913 +0000 UTC m=+4311.044366180" lastFinishedPulling="2025-12-05 06:38:32.345166454 +0000 UTC m=+4314.581896721" observedRunningTime="2025-12-05 
06:38:32.881040222 +0000 UTC m=+4315.117770490" watchObservedRunningTime="2025-12-05 06:38:32.887889469 +0000 UTC m=+4315.124619736" Dec 05 06:38:38 crc kubenswrapper[4652]: I1205 06:38:38.191794 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:38 crc kubenswrapper[4652]: I1205 06:38:38.192125 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:38 crc kubenswrapper[4652]: I1205 06:38:38.239668 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:38 crc kubenswrapper[4652]: I1205 06:38:38.969416 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:39 crc kubenswrapper[4652]: I1205 06:38:39.015135 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q7gcl"] Dec 05 06:38:39 crc kubenswrapper[4652]: I1205 06:38:39.126380 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:38:39 crc kubenswrapper[4652]: E1205 06:38:39.126705 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:38:40 crc kubenswrapper[4652]: I1205 06:38:40.945630 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-q7gcl" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerName="registry-server" containerID="cri-o://d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3" gracePeriod=2 Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.324318 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.416610 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2s4z\" (UniqueName: \"kubernetes.io/projected/8d10ce98-0e0e-4f8d-be46-acc13ec44992-kube-api-access-j2s4z\") pod \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.416786 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-catalog-content\") pod \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.416871 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-utilities\") pod \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\" (UID: \"8d10ce98-0e0e-4f8d-be46-acc13ec44992\") " Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.417780 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-utilities" (OuterVolumeSpecName: "utilities") pod "8d10ce98-0e0e-4f8d-be46-acc13ec44992" (UID: "8d10ce98-0e0e-4f8d-be46-acc13ec44992"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.421587 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d10ce98-0e0e-4f8d-be46-acc13ec44992-kube-api-access-j2s4z" (OuterVolumeSpecName: "kube-api-access-j2s4z") pod "8d10ce98-0e0e-4f8d-be46-acc13ec44992" (UID: "8d10ce98-0e0e-4f8d-be46-acc13ec44992"). InnerVolumeSpecName "kube-api-access-j2s4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.498291 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d10ce98-0e0e-4f8d-be46-acc13ec44992" (UID: "8d10ce98-0e0e-4f8d-be46-acc13ec44992"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.520863 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.520923 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2s4z\" (UniqueName: \"kubernetes.io/projected/8d10ce98-0e0e-4f8d-be46-acc13ec44992-kube-api-access-j2s4z\") on node \"crc\" DevicePath \"\"" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.520937 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d10ce98-0e0e-4f8d-be46-acc13ec44992-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.956636 4652 generic.go:334] "Generic (PLEG): container finished" podID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerID="d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3" exitCode=0 Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.956688 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7gcl" event={"ID":"8d10ce98-0e0e-4f8d-be46-acc13ec44992","Type":"ContainerDied","Data":"d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3"} Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.956743 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q7gcl" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.956752 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7gcl" event={"ID":"8d10ce98-0e0e-4f8d-be46-acc13ec44992","Type":"ContainerDied","Data":"72bdd721667d4f0cba09d1a8c3e492aa4ab1f9104d9372f29e073ec5afc2b9fd"} Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.956783 4652 scope.go:117] "RemoveContainer" containerID="d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.981764 4652 scope.go:117] "RemoveContainer" containerID="f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a" Dec 05 06:38:41 crc kubenswrapper[4652]: I1205 06:38:41.998160 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q7gcl"] Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.001879 4652 scope.go:117] "RemoveContainer" containerID="1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840" Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.008191 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-q7gcl"] Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.040017 4652 scope.go:117] "RemoveContainer" containerID="d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3" Dec 05 06:38:42 crc kubenswrapper[4652]: E1205 06:38:42.040438 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3\": container with ID starting with d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3 not found: ID does not exist" containerID="d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3" Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.040469 4652 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3"} err="failed to get container status \"d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3\": rpc error: code = NotFound desc = could not find container \"d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3\": container with ID starting with d0f23bd2caba029c512341302898c77478b2ef534f19d3dac1fc30f53ec3cee3 not found: ID does not exist" Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.040490 4652 scope.go:117] "RemoveContainer" containerID="f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a" Dec 05 06:38:42 crc kubenswrapper[4652]: E1205 06:38:42.040842 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a\": container with ID starting with f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a not found: ID does not exist" containerID="f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a" Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.040891 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a"} err="failed to get container status \"f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a\": rpc error: code = NotFound desc = could not find container \"f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a\": container with ID starting with f0f7bb0d242a6f6610b73d8d54ce708835eb83877f53ae6cb7e0914915f5dc2a not found: ID does not exist" Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.040920 4652 scope.go:117] "RemoveContainer" containerID="1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840" Dec 05 06:38:42 crc kubenswrapper[4652]: E1205 06:38:42.041269 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840\": container with ID starting with 1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840 not found: ID does not exist" containerID="1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840" Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.041295 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840"} err="failed to get container status \"1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840\": rpc error: code = NotFound desc = could not find container \"1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840\": container with ID starting with 1a0660ad9a4758b36a7bc1e165decaf17d44db389b4ed432f41407ba0156e840 not found: ID does not exist" Dec 05 06:38:42 crc kubenswrapper[4652]: I1205 06:38:42.136298 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" path="/var/lib/kubelet/pods/8d10ce98-0e0e-4f8d-be46-acc13ec44992/volumes" Dec 05 06:38:51 crc kubenswrapper[4652]: I1205 06:38:51.126158 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:38:51 crc kubenswrapper[4652]: E1205 06:38:51.127108 4652 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:39:04 crc kubenswrapper[4652]: I1205 06:39:04.125761 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:39:04 crc kubenswrapper[4652]: E1205 06:39:04.126957 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:39:15 crc kubenswrapper[4652]: I1205 06:39:15.125220 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:39:15 crc kubenswrapper[4652]: E1205 06:39:15.125811 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:39:28 crc kubenswrapper[4652]: I1205 06:39:28.130200 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:39:28 crc kubenswrapper[4652]: E1205 06:39:28.130755 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:39:42 crc kubenswrapper[4652]: I1205 06:39:42.126112 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:39:42 crc kubenswrapper[4652]: E1205 06:39:42.126900 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:39:55 crc kubenswrapper[4652]: I1205 06:39:55.125962 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:39:55 crc kubenswrapper[4652]: E1205 06:39:55.126541 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:40:10 crc kubenswrapper[4652]: I1205 06:40:10.125986 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:40:10 crc kubenswrapper[4652]: E1205 06:40:10.126666 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:40:21 crc kubenswrapper[4652]: I1205 06:40:21.125774 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:40:21 crc kubenswrapper[4652]: E1205 06:40:21.126310 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:40:35 crc kubenswrapper[4652]: I1205 06:40:35.125203 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:40:35 crc kubenswrapper[4652]: E1205 06:40:35.125789 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:40:46 crc kubenswrapper[4652]: I1205 06:40:46.126471 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:40:46 crc kubenswrapper[4652]: E1205 06:40:46.127298 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:41:01 crc kubenswrapper[4652]: I1205 06:41:01.002938 4652 generic.go:334] "Generic (PLEG): container finished" podID="a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" containerID="4ee485b6b6516b231f15bffb971b75f7f347fddbccd4e67ec4b93ef1f6a43dbb" exitCode=1 Dec 05 06:41:01 crc kubenswrapper[4652]: I1205 06:41:01.003036 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2","Type":"ContainerDied","Data":"4ee485b6b6516b231f15bffb971b75f7f347fddbccd4e67ec4b93ef1f6a43dbb"} Dec 05 06:41:01 crc kubenswrapper[4652]: I1205 06:41:01.125756 4652 scope.go:117] 
"RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:41:01 crc kubenswrapper[4652]: E1205 06:41:01.126014 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.286797 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.314538 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ssh-key\") pod \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.314637 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config\") pod \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.314708 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-config-data\") pod \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.314761 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config-secret\") pod \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.314789 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.314892 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-workdir\") pod \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.314982 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ca-certs\") pod \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.315041 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-temporary\") pod 
\"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.315078 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5vzk\" (UniqueName: \"kubernetes.io/projected/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-kube-api-access-t5vzk\") pod \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\" (UID: \"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2\") " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.321523 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.325238 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-config-data" (OuterVolumeSpecName: "config-data") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.325870 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.331474 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.332052 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-kube-api-access-t5vzk" (OuterVolumeSpecName: "kube-api-access-t5vzk") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "kube-api-access-t5vzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.345442 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.347116 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.352756 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.364655 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" (UID: "a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417734 4652 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417762 4652 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417772 4652 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417784 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5vzk\" (UniqueName: \"kubernetes.io/projected/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-kube-api-access-t5vzk\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417793 4652 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417802 4652 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417809 4652 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417818 4652 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.417863 4652 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.434641 4652 operation_generator.go:917] UnmountDevice succeeded for 
volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 05 06:41:02 crc kubenswrapper[4652]: I1205 06:41:02.519044 4652 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:41:03 crc kubenswrapper[4652]: I1205 06:41:03.018723 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2","Type":"ContainerDied","Data":"9ca1a1c550d0f041facc82dd0375c36e185e9be696bdd0ffd6a1f95db9ec9b51"} Dec 05 06:41:03 crc kubenswrapper[4652]: I1205 06:41:03.018945 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ca1a1c550d0f041facc82dd0375c36e185e9be696bdd0ffd6a1f95db9ec9b51" Dec 05 06:41:03 crc kubenswrapper[4652]: I1205 06:41:03.018799 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.052239 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 06:41:05 crc kubenswrapper[4652]: E1205 06:41:05.052813 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerName="extract-utilities" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.052827 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerName="extract-utilities" Dec 05 06:41:05 crc kubenswrapper[4652]: E1205 06:41:05.052835 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerName="registry-server" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.052841 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerName="registry-server" Dec 05 06:41:05 crc kubenswrapper[4652]: E1205 06:41:05.052864 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" containerName="tempest-tests-tempest-tests-runner" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.052869 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" containerName="tempest-tests-tempest-tests-runner" Dec 05 06:41:05 crc kubenswrapper[4652]: E1205 06:41:05.052895 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerName="extract-content" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.052900 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerName="extract-content" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.053103 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2" containerName="tempest-tests-tempest-tests-runner" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.053112 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d10ce98-0e0e-4f8d-be46-acc13ec44992" containerName="registry-server" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.053743 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.055433 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-694t4" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.061682 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.168103 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfzhx\" (UniqueName: \"kubernetes.io/projected/ed44c043-cf02-4901-ab0a-a86753eef277-kube-api-access-zfzhx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ed44c043-cf02-4901-ab0a-a86753eef277\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.168237 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ed44c043-cf02-4901-ab0a-a86753eef277\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.269362 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ed44c043-cf02-4901-ab0a-a86753eef277\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.269490 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfzhx\" (UniqueName: \"kubernetes.io/projected/ed44c043-cf02-4901-ab0a-a86753eef277-kube-api-access-zfzhx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ed44c043-cf02-4901-ab0a-a86753eef277\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.269794 4652 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ed44c043-cf02-4901-ab0a-a86753eef277\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.408111 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfzhx\" (UniqueName: \"kubernetes.io/projected/ed44c043-cf02-4901-ab0a-a86753eef277-kube-api-access-zfzhx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ed44c043-cf02-4901-ab0a-a86753eef277\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:05 crc kubenswrapper[4652]: I1205 06:41:05.438546 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"ed44c043-cf02-4901-ab0a-a86753eef277\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:05 crc 
kubenswrapper[4652]: I1205 06:41:05.675758 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 06:41:06 crc kubenswrapper[4652]: I1205 06:41:06.045991 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 06:41:07 crc kubenswrapper[4652]: I1205 06:41:07.050127 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"ed44c043-cf02-4901-ab0a-a86753eef277","Type":"ContainerStarted","Data":"2bf24fd2af5a1918cba14b371d099e1dcd0cc726d88449d75034f5dcf4432e49"} Dec 05 06:41:08 crc kubenswrapper[4652]: I1205 06:41:08.060197 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"ed44c043-cf02-4901-ab0a-a86753eef277","Type":"ContainerStarted","Data":"4c55c56224bc246f88edbb4a55478b3f6cbf6f2e017c9c4cff510767508c6460"} Dec 05 06:41:08 crc kubenswrapper[4652]: I1205 06:41:08.071066 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.974385044 podStartE2EDuration="3.071049766s" podCreationTimestamp="2025-12-05 06:41:05 +0000 UTC" firstStartedPulling="2025-12-05 06:41:06.04961831 +0000 UTC m=+4468.286348577" lastFinishedPulling="2025-12-05 06:41:07.146283031 +0000 UTC m=+4469.383013299" observedRunningTime="2025-12-05 06:41:08.068723091 +0000 UTC m=+4470.305453358" watchObservedRunningTime="2025-12-05 06:41:08.071049766 +0000 UTC m=+4470.307780034" Dec 05 06:41:13 crc kubenswrapper[4652]: I1205 06:41:13.126297 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:41:13 crc kubenswrapper[4652]: E1205 06:41:13.127216 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:41:24 crc kubenswrapper[4652]: I1205 06:41:24.126614 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:41:24 crc kubenswrapper[4652]: E1205 06:41:24.127206 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:41:39 crc kubenswrapper[4652]: I1205 06:41:39.126113 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:41:39 crc kubenswrapper[4652]: E1205 06:41:39.127199 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.370521 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jzx4s/must-gather-42phd"] Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.372824 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.374314 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jzx4s"/"default-dockercfg-hk9d9" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.375584 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jzx4s"/"kube-root-ca.crt" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.378728 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jzx4s"/"openshift-service-ca.crt" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.383000 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jzx4s/must-gather-42phd"] Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.444142 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/71887dbc-5af0-4607-acfd-ee6e12e90d68-must-gather-output\") pod \"must-gather-42phd\" (UID: \"71887dbc-5af0-4607-acfd-ee6e12e90d68\") " pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.444322 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9km8h\" (UniqueName: \"kubernetes.io/projected/71887dbc-5af0-4607-acfd-ee6e12e90d68-kube-api-access-9km8h\") pod \"must-gather-42phd\" (UID: \"71887dbc-5af0-4607-acfd-ee6e12e90d68\") " pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.546603 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/71887dbc-5af0-4607-acfd-ee6e12e90d68-must-gather-output\") pod \"must-gather-42phd\" (UID: \"71887dbc-5af0-4607-acfd-ee6e12e90d68\") " pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.546756 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9km8h\" (UniqueName: \"kubernetes.io/projected/71887dbc-5af0-4607-acfd-ee6e12e90d68-kube-api-access-9km8h\") pod \"must-gather-42phd\" (UID: \"71887dbc-5af0-4607-acfd-ee6e12e90d68\") " pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.547117 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/71887dbc-5af0-4607-acfd-ee6e12e90d68-must-gather-output\") pod \"must-gather-42phd\" (UID: \"71887dbc-5af0-4607-acfd-ee6e12e90d68\") " pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.561795 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9km8h\" (UniqueName: 
\"kubernetes.io/projected/71887dbc-5af0-4607-acfd-ee6e12e90d68-kube-api-access-9km8h\") pod \"must-gather-42phd\" (UID: \"71887dbc-5af0-4607-acfd-ee6e12e90d68\") " pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:41:49 crc kubenswrapper[4652]: I1205 06:41:49.688014 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:41:50 crc kubenswrapper[4652]: I1205 06:41:50.107040 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jzx4s/must-gather-42phd"] Dec 05 06:41:50 crc kubenswrapper[4652]: I1205 06:41:50.126654 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:41:50 crc kubenswrapper[4652]: E1205 06:41:50.127500 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:41:50 crc kubenswrapper[4652]: I1205 06:41:50.467748 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/must-gather-42phd" event={"ID":"71887dbc-5af0-4607-acfd-ee6e12e90d68","Type":"ContainerStarted","Data":"72f2b4a72e5ef6733979b2cefedab6809b90c99dbe7cf056c4bc7093d7536f89"} Dec 05 06:41:56 crc kubenswrapper[4652]: I1205 06:41:56.543531 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/must-gather-42phd" event={"ID":"71887dbc-5af0-4607-acfd-ee6e12e90d68","Type":"ContainerStarted","Data":"af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280"} Dec 05 06:41:56 crc kubenswrapper[4652]: I1205 06:41:56.544292 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/must-gather-42phd" event={"ID":"71887dbc-5af0-4607-acfd-ee6e12e90d68","Type":"ContainerStarted","Data":"0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47"} Dec 05 06:41:56 crc kubenswrapper[4652]: I1205 06:41:56.564647 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jzx4s/must-gather-42phd" podStartSLOduration=2.13793482 podStartE2EDuration="7.564636295s" podCreationTimestamp="2025-12-05 06:41:49 +0000 UTC" firstStartedPulling="2025-12-05 06:41:50.129751422 +0000 UTC m=+4512.366481689" lastFinishedPulling="2025-12-05 06:41:55.556452897 +0000 UTC m=+4517.793183164" observedRunningTime="2025-12-05 06:41:56.556780916 +0000 UTC m=+4518.793511183" watchObservedRunningTime="2025-12-05 06:41:56.564636295 +0000 UTC m=+4518.801366561" Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.069138 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-kzh5w"] Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.070681 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.167400 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bba73e58-b263-4f13-8860-788a98b6fea1-host\") pod \"crc-debug-kzh5w\" (UID: \"bba73e58-b263-4f13-8860-788a98b6fea1\") " pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.167518 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npd7k\" (UniqueName: \"kubernetes.io/projected/bba73e58-b263-4f13-8860-788a98b6fea1-kube-api-access-npd7k\") pod \"crc-debug-kzh5w\" (UID: \"bba73e58-b263-4f13-8860-788a98b6fea1\") " pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.269211 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bba73e58-b263-4f13-8860-788a98b6fea1-host\") pod \"crc-debug-kzh5w\" (UID: \"bba73e58-b263-4f13-8860-788a98b6fea1\") " pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.269327 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npd7k\" (UniqueName: \"kubernetes.io/projected/bba73e58-b263-4f13-8860-788a98b6fea1-kube-api-access-npd7k\") pod \"crc-debug-kzh5w\" (UID: \"bba73e58-b263-4f13-8860-788a98b6fea1\") " pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.269384 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bba73e58-b263-4f13-8860-788a98b6fea1-host\") pod \"crc-debug-kzh5w\" (UID: \"bba73e58-b263-4f13-8860-788a98b6fea1\") " pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.290408 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npd7k\" (UniqueName: \"kubernetes.io/projected/bba73e58-b263-4f13-8860-788a98b6fea1-kube-api-access-npd7k\") pod \"crc-debug-kzh5w\" (UID: \"bba73e58-b263-4f13-8860-788a98b6fea1\") " pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.390127 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:41:59 crc kubenswrapper[4652]: W1205 06:41:59.414044 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbba73e58_b263_4f13_8860_788a98b6fea1.slice/crio-9d46b3570b15d867ad3d0d257cbf705db84a7d4fdc59eaeb4aff66ea6ce8b8cb WatchSource:0}: Error finding container 9d46b3570b15d867ad3d0d257cbf705db84a7d4fdc59eaeb4aff66ea6ce8b8cb: Status 404 returned error can't find the container with id 9d46b3570b15d867ad3d0d257cbf705db84a7d4fdc59eaeb4aff66ea6ce8b8cb Dec 05 06:41:59 crc kubenswrapper[4652]: I1205 06:41:59.570911 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" event={"ID":"bba73e58-b263-4f13-8860-788a98b6fea1","Type":"ContainerStarted","Data":"9d46b3570b15d867ad3d0d257cbf705db84a7d4fdc59eaeb4aff66ea6ce8b8cb"} Dec 05 06:42:03 crc kubenswrapper[4652]: I1205 06:42:03.126128 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:42:03 crc kubenswrapper[4652]: E1205 06:42:03.126734 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:42:08 crc kubenswrapper[4652]: I1205 06:42:08.648164 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" event={"ID":"bba73e58-b263-4f13-8860-788a98b6fea1","Type":"ContainerStarted","Data":"1fa2e7128d30e4e90fb1ee74143fd2596274bd3705ec370f75d0e601e2e669fa"} Dec 05 06:42:08 crc kubenswrapper[4652]: I1205 06:42:08.664635 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" podStartSLOduration=1.270900591 podStartE2EDuration="9.664620348s" podCreationTimestamp="2025-12-05 06:41:59 +0000 UTC" firstStartedPulling="2025-12-05 06:41:59.415951222 +0000 UTC m=+4521.652681479" lastFinishedPulling="2025-12-05 06:42:07.809670969 +0000 UTC m=+4530.046401236" observedRunningTime="2025-12-05 06:42:08.660180401 +0000 UTC m=+4530.896910668" watchObservedRunningTime="2025-12-05 06:42:08.664620348 +0000 UTC m=+4530.901350615" Dec 05 06:42:18 crc kubenswrapper[4652]: I1205 06:42:18.130946 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:42:18 crc kubenswrapper[4652]: I1205 06:42:18.733735 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"beb45db72cad990812bece4f42e33b4e0ed1610f1d17c0ba8eb299a6992d0374"} Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.116256 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c9lmp"] Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.119305 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.198080 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c9lmp"] Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.282265 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-utilities\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.282665 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-catalog-content\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.282888 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7scqh\" (UniqueName: \"kubernetes.io/projected/79f97820-7389-4146-b2a6-f7daedd8f94b-kube-api-access-7scqh\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.387933 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-utilities\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.388046 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-catalog-content\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.388170 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7scqh\" (UniqueName: \"kubernetes.io/projected/79f97820-7389-4146-b2a6-f7daedd8f94b-kube-api-access-7scqh\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.388365 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-utilities\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.388439 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-catalog-content\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.423513 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7scqh\" (UniqueName: \"kubernetes.io/projected/79f97820-7389-4146-b2a6-f7daedd8f94b-kube-api-access-7scqh\") pod \"community-operators-c9lmp\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.454636 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:38 crc kubenswrapper[4652]: I1205 06:42:38.950250 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c9lmp"] Dec 05 06:42:39 crc kubenswrapper[4652]: I1205 06:42:39.915942 4652 generic.go:334] "Generic (PLEG): container finished" podID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerID="e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233" exitCode=0 Dec 05 06:42:39 crc kubenswrapper[4652]: I1205 06:42:39.916055 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9lmp" event={"ID":"79f97820-7389-4146-b2a6-f7daedd8f94b","Type":"ContainerDied","Data":"e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233"} Dec 05 06:42:39 crc kubenswrapper[4652]: I1205 06:42:39.916511 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9lmp" event={"ID":"79f97820-7389-4146-b2a6-f7daedd8f94b","Type":"ContainerStarted","Data":"8a38092136089d498d1c6f6d53cc2d907d1f7cad598b48540a8075c61e0addf5"} Dec 05 06:42:39 crc kubenswrapper[4652]: I1205 06:42:39.921081 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:42:40 crc kubenswrapper[4652]: I1205 06:42:40.925333 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9lmp" event={"ID":"79f97820-7389-4146-b2a6-f7daedd8f94b","Type":"ContainerStarted","Data":"162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97"} Dec 05 06:42:41 crc kubenswrapper[4652]: I1205 06:42:41.941227 4652 generic.go:334] "Generic (PLEG): container finished" podID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerID="162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97" exitCode=0 Dec 05 06:42:41 crc kubenswrapper[4652]: I1205 06:42:41.941516 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9lmp" event={"ID":"79f97820-7389-4146-b2a6-f7daedd8f94b","Type":"ContainerDied","Data":"162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97"} Dec 05 06:42:42 crc kubenswrapper[4652]: I1205 06:42:42.951184 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9lmp" event={"ID":"79f97820-7389-4146-b2a6-f7daedd8f94b","Type":"ContainerStarted","Data":"7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55"} Dec 05 06:42:42 crc kubenswrapper[4652]: I1205 06:42:42.974491 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c9lmp" podStartSLOduration=2.4570565589999998 podStartE2EDuration="4.974469941s" podCreationTimestamp="2025-12-05 06:42:38 +0000 UTC" firstStartedPulling="2025-12-05 06:42:39.920572875 +0000 UTC m=+4562.157303142" lastFinishedPulling="2025-12-05 06:42:42.437986256 +0000 UTC m=+4564.674716524" observedRunningTime="2025-12-05 06:42:42.968014614 +0000 UTC m=+4565.204744882" watchObservedRunningTime="2025-12-05 
06:42:42.974469941 +0000 UTC m=+4565.211200209" Dec 05 06:42:48 crc kubenswrapper[4652]: I1205 06:42:48.012656 4652 generic.go:334] "Generic (PLEG): container finished" podID="bba73e58-b263-4f13-8860-788a98b6fea1" containerID="1fa2e7128d30e4e90fb1ee74143fd2596274bd3705ec370f75d0e601e2e669fa" exitCode=0 Dec 05 06:42:48 crc kubenswrapper[4652]: I1205 06:42:48.012761 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" event={"ID":"bba73e58-b263-4f13-8860-788a98b6fea1","Type":"ContainerDied","Data":"1fa2e7128d30e4e90fb1ee74143fd2596274bd3705ec370f75d0e601e2e669fa"} Dec 05 06:42:48 crc kubenswrapper[4652]: I1205 06:42:48.455262 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:48 crc kubenswrapper[4652]: I1205 06:42:48.455577 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:48 crc kubenswrapper[4652]: I1205 06:42:48.497717 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.062363 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.096947 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c9lmp"] Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.120988 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.147386 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-kzh5w"] Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.156735 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-kzh5w"] Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.325379 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npd7k\" (UniqueName: \"kubernetes.io/projected/bba73e58-b263-4f13-8860-788a98b6fea1-kube-api-access-npd7k\") pod \"bba73e58-b263-4f13-8860-788a98b6fea1\" (UID: \"bba73e58-b263-4f13-8860-788a98b6fea1\") " Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.325634 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bba73e58-b263-4f13-8860-788a98b6fea1-host\") pod \"bba73e58-b263-4f13-8860-788a98b6fea1\" (UID: \"bba73e58-b263-4f13-8860-788a98b6fea1\") " Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.325693 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bba73e58-b263-4f13-8860-788a98b6fea1-host" (OuterVolumeSpecName: "host") pod "bba73e58-b263-4f13-8860-788a98b6fea1" (UID: "bba73e58-b263-4f13-8860-788a98b6fea1"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.326351 4652 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bba73e58-b263-4f13-8860-788a98b6fea1-host\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.813390 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bba73e58-b263-4f13-8860-788a98b6fea1-kube-api-access-npd7k" (OuterVolumeSpecName: "kube-api-access-npd7k") pod "bba73e58-b263-4f13-8860-788a98b6fea1" (UID: "bba73e58-b263-4f13-8860-788a98b6fea1"). InnerVolumeSpecName "kube-api-access-npd7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:42:49 crc kubenswrapper[4652]: I1205 06:42:49.835392 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npd7k\" (UniqueName: \"kubernetes.io/projected/bba73e58-b263-4f13-8860-788a98b6fea1-kube-api-access-npd7k\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.038309 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-kzh5w" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.038398 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d46b3570b15d867ad3d0d257cbf705db84a7d4fdc59eaeb4aff66ea6ce8b8cb" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.138515 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bba73e58-b263-4f13-8860-788a98b6fea1" path="/var/lib/kubelet/pods/bba73e58-b263-4f13-8860-788a98b6fea1/volumes" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.292119 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-7tc4r"] Dec 05 06:42:50 crc kubenswrapper[4652]: E1205 06:42:50.292612 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bba73e58-b263-4f13-8860-788a98b6fea1" containerName="container-00" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.292631 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="bba73e58-b263-4f13-8860-788a98b6fea1" containerName="container-00" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.292841 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="bba73e58-b263-4f13-8860-788a98b6fea1" containerName="container-00" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.293578 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.347353 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a4f156d-e99f-4948-ba75-7256b3b4510d-host\") pod \"crc-debug-7tc4r\" (UID: \"2a4f156d-e99f-4948-ba75-7256b3b4510d\") " pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.347565 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8s2b\" (UniqueName: \"kubernetes.io/projected/2a4f156d-e99f-4948-ba75-7256b3b4510d-kube-api-access-w8s2b\") pod \"crc-debug-7tc4r\" (UID: \"2a4f156d-e99f-4948-ba75-7256b3b4510d\") " pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.450352 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a4f156d-e99f-4948-ba75-7256b3b4510d-host\") pod \"crc-debug-7tc4r\" (UID: \"2a4f156d-e99f-4948-ba75-7256b3b4510d\") " pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.450436 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8s2b\" (UniqueName: \"kubernetes.io/projected/2a4f156d-e99f-4948-ba75-7256b3b4510d-kube-api-access-w8s2b\") pod \"crc-debug-7tc4r\" (UID: \"2a4f156d-e99f-4948-ba75-7256b3b4510d\") " pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.450500 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a4f156d-e99f-4948-ba75-7256b3b4510d-host\") pod \"crc-debug-7tc4r\" (UID: \"2a4f156d-e99f-4948-ba75-7256b3b4510d\") " pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.465251 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8s2b\" (UniqueName: \"kubernetes.io/projected/2a4f156d-e99f-4948-ba75-7256b3b4510d-kube-api-access-w8s2b\") pod \"crc-debug-7tc4r\" (UID: \"2a4f156d-e99f-4948-ba75-7256b3b4510d\") " pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:50 crc kubenswrapper[4652]: I1205 06:42:50.611148 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.047899 4652 generic.go:334] "Generic (PLEG): container finished" podID="2a4f156d-e99f-4948-ba75-7256b3b4510d" containerID="387f9ae3bec9f47cbabcde58b8ed37bc8dc761e94d80268a5bc9ace71d6b3eee" exitCode=0 Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.047964 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" event={"ID":"2a4f156d-e99f-4948-ba75-7256b3b4510d","Type":"ContainerDied","Data":"387f9ae3bec9f47cbabcde58b8ed37bc8dc761e94d80268a5bc9ace71d6b3eee"} Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.048344 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" event={"ID":"2a4f156d-e99f-4948-ba75-7256b3b4510d","Type":"ContainerStarted","Data":"f83c290c47d8e590ffa4a1f6f35e8930481993d02ce164069508f87772a7ca41"} Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.048507 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c9lmp" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerName="registry-server" containerID="cri-o://7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55" gracePeriod=2 Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.907753 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.986656 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-utilities\") pod \"79f97820-7389-4146-b2a6-f7daedd8f94b\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.986947 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-catalog-content\") pod \"79f97820-7389-4146-b2a6-f7daedd8f94b\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.987069 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7scqh\" (UniqueName: \"kubernetes.io/projected/79f97820-7389-4146-b2a6-f7daedd8f94b-kube-api-access-7scqh\") pod \"79f97820-7389-4146-b2a6-f7daedd8f94b\" (UID: \"79f97820-7389-4146-b2a6-f7daedd8f94b\") " Dec 05 06:42:51 crc kubenswrapper[4652]: I1205 06:42:51.988193 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-utilities" (OuterVolumeSpecName: "utilities") pod "79f97820-7389-4146-b2a6-f7daedd8f94b" (UID: "79f97820-7389-4146-b2a6-f7daedd8f94b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.018210 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79f97820-7389-4146-b2a6-f7daedd8f94b-kube-api-access-7scqh" (OuterVolumeSpecName: "kube-api-access-7scqh") pod "79f97820-7389-4146-b2a6-f7daedd8f94b" (UID: "79f97820-7389-4146-b2a6-f7daedd8f94b"). InnerVolumeSpecName "kube-api-access-7scqh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.052734 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79f97820-7389-4146-b2a6-f7daedd8f94b" (UID: "79f97820-7389-4146-b2a6-f7daedd8f94b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.058174 4652 generic.go:334] "Generic (PLEG): container finished" podID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerID="7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55" exitCode=0 Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.058225 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9lmp" event={"ID":"79f97820-7389-4146-b2a6-f7daedd8f94b","Type":"ContainerDied","Data":"7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55"} Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.058246 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c9lmp" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.058272 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9lmp" event={"ID":"79f97820-7389-4146-b2a6-f7daedd8f94b","Type":"ContainerDied","Data":"8a38092136089d498d1c6f6d53cc2d907d1f7cad598b48540a8075c61e0addf5"} Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.058293 4652 scope.go:117] "RemoveContainer" containerID="7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.090271 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.090295 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7scqh\" (UniqueName: \"kubernetes.io/projected/79f97820-7389-4146-b2a6-f7daedd8f94b-kube-api-access-7scqh\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.090309 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79f97820-7389-4146-b2a6-f7daedd8f94b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.117392 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.133242 4652 scope.go:117] "RemoveContainer" containerID="162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.153680 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c9lmp"] Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.158087 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c9lmp"] Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.162513 4652 scope.go:117] "RemoveContainer" containerID="e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.178736 4652 scope.go:117] "RemoveContainer" containerID="7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55" Dec 05 06:42:52 crc kubenswrapper[4652]: E1205 06:42:52.179106 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55\": container with ID starting with 7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55 not found: ID does not exist" containerID="7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.179144 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55"} err="failed to get container status \"7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55\": rpc error: code = NotFound desc = could not find container \"7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55\": container with ID starting with 7cac7c705572f7a56a776eb5838c6a8390249f1bad8e8eb217806697a8e85a55 not found: ID does not exist" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.179169 4652 scope.go:117] "RemoveContainer" containerID="162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97" Dec 05 06:42:52 crc kubenswrapper[4652]: E1205 06:42:52.179646 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97\": container with ID starting with 162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97 not found: ID does not exist" containerID="162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.179688 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97"} err="failed to get container status \"162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97\": rpc error: code = NotFound desc = could not find container \"162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97\": container with ID starting with 162a0ce0a941c401b456c5521c2ab338be978eaa1dad7835d70601b252926c97 not found: ID does not exist" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.179716 4652 scope.go:117] "RemoveContainer" containerID="e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233" Dec 05 06:42:52 crc kubenswrapper[4652]: E1205 06:42:52.180013 4652 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233\": container with ID starting with e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233 not found: ID does not exist" containerID="e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.180040 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233"} err="failed to get container status \"e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233\": rpc error: code = NotFound desc = could not find container \"e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233\": container with ID starting with e6b809c86f6f9bfd102e54dd8bfb7c78a9dfcc8fe44340ee025c2639c8e8c233 not found: ID does not exist" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.191158 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a4f156d-e99f-4948-ba75-7256b3b4510d-host\") pod \"2a4f156d-e99f-4948-ba75-7256b3b4510d\" (UID: \"2a4f156d-e99f-4948-ba75-7256b3b4510d\") " Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.191256 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2a4f156d-e99f-4948-ba75-7256b3b4510d-host" (OuterVolumeSpecName: "host") pod "2a4f156d-e99f-4948-ba75-7256b3b4510d" (UID: "2a4f156d-e99f-4948-ba75-7256b3b4510d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.191372 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8s2b\" (UniqueName: \"kubernetes.io/projected/2a4f156d-e99f-4948-ba75-7256b3b4510d-kube-api-access-w8s2b\") pod \"2a4f156d-e99f-4948-ba75-7256b3b4510d\" (UID: \"2a4f156d-e99f-4948-ba75-7256b3b4510d\") " Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.191969 4652 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2a4f156d-e99f-4948-ba75-7256b3b4510d-host\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.203316 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a4f156d-e99f-4948-ba75-7256b3b4510d-kube-api-access-w8s2b" (OuterVolumeSpecName: "kube-api-access-w8s2b") pod "2a4f156d-e99f-4948-ba75-7256b3b4510d" (UID: "2a4f156d-e99f-4948-ba75-7256b3b4510d"). InnerVolumeSpecName "kube-api-access-w8s2b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:42:52 crc kubenswrapper[4652]: I1205 06:42:52.296790 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8s2b\" (UniqueName: \"kubernetes.io/projected/2a4f156d-e99f-4948-ba75-7256b3b4510d-kube-api-access-w8s2b\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:53 crc kubenswrapper[4652]: I1205 06:42:53.067525 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" event={"ID":"2a4f156d-e99f-4948-ba75-7256b3b4510d","Type":"ContainerDied","Data":"f83c290c47d8e590ffa4a1f6f35e8930481993d02ce164069508f87772a7ca41"} Dec 05 06:42:53 crc kubenswrapper[4652]: I1205 06:42:53.067600 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f83c290c47d8e590ffa4a1f6f35e8930481993d02ce164069508f87772a7ca41" Dec 05 06:42:53 crc kubenswrapper[4652]: I1205 06:42:53.067611 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-7tc4r" Dec 05 06:42:53 crc kubenswrapper[4652]: I1205 06:42:53.313900 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-7tc4r"] Dec 05 06:42:53 crc kubenswrapper[4652]: I1205 06:42:53.322408 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-7tc4r"] Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.135160 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a4f156d-e99f-4948-ba75-7256b3b4510d" path="/var/lib/kubelet/pods/2a4f156d-e99f-4948-ba75-7256b3b4510d/volumes" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.135780 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" path="/var/lib/kubelet/pods/79f97820-7389-4146-b2a6-f7daedd8f94b/volumes" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.451814 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-rs5nn"] Dec 05 06:42:54 crc kubenswrapper[4652]: E1205 06:42:54.452210 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a4f156d-e99f-4948-ba75-7256b3b4510d" containerName="container-00" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.452225 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a4f156d-e99f-4948-ba75-7256b3b4510d" containerName="container-00" Dec 05 06:42:54 crc kubenswrapper[4652]: E1205 06:42:54.452242 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerName="registry-server" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.452247 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerName="registry-server" Dec 05 06:42:54 crc kubenswrapper[4652]: E1205 06:42:54.452263 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerName="extract-content" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.452269 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerName="extract-content" Dec 05 06:42:54 crc kubenswrapper[4652]: E1205 06:42:54.452287 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerName="extract-utilities" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.452293 4652 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerName="extract-utilities" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.452479 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="79f97820-7389-4146-b2a6-f7daedd8f94b" containerName="registry-server" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.452511 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a4f156d-e99f-4948-ba75-7256b3b4510d" containerName="container-00" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.453127 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.551425 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjbjt\" (UniqueName: \"kubernetes.io/projected/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-kube-api-access-hjbjt\") pod \"crc-debug-rs5nn\" (UID: \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\") " pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.551604 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-host\") pod \"crc-debug-rs5nn\" (UID: \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\") " pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.653937 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjbjt\" (UniqueName: \"kubernetes.io/projected/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-kube-api-access-hjbjt\") pod \"crc-debug-rs5nn\" (UID: \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\") " pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.654226 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-host\") pod \"crc-debug-rs5nn\" (UID: \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\") " pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.654278 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-host\") pod \"crc-debug-rs5nn\" (UID: \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\") " pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.669981 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjbjt\" (UniqueName: \"kubernetes.io/projected/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-kube-api-access-hjbjt\") pod \"crc-debug-rs5nn\" (UID: \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\") " pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:54 crc kubenswrapper[4652]: I1205 06:42:54.767578 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:55 crc kubenswrapper[4652]: I1205 06:42:55.086268 4652 generic.go:334] "Generic (PLEG): container finished" podID="b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead" containerID="9374b0db66f60b02d56b7f6973ba860743b0e0523b5d5542e3783c66d80620e3" exitCode=0 Dec 05 06:42:55 crc kubenswrapper[4652]: I1205 06:42:55.086312 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" event={"ID":"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead","Type":"ContainerDied","Data":"9374b0db66f60b02d56b7f6973ba860743b0e0523b5d5542e3783c66d80620e3"} Dec 05 06:42:55 crc kubenswrapper[4652]: I1205 06:42:55.086339 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" event={"ID":"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead","Type":"ContainerStarted","Data":"1fe5a6d38d40fd8b8df5da69429de9c2d01012cbd1edfa566949cf3b097ab8a3"} Dec 05 06:42:55 crc kubenswrapper[4652]: I1205 06:42:55.120676 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-rs5nn"] Dec 05 06:42:55 crc kubenswrapper[4652]: I1205 06:42:55.131475 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jzx4s/crc-debug-rs5nn"] Dec 05 06:42:56 crc kubenswrapper[4652]: I1205 06:42:56.190061 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:56 crc kubenswrapper[4652]: I1205 06:42:56.289394 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-host\") pod \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\" (UID: \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\") " Dec 05 06:42:56 crc kubenswrapper[4652]: I1205 06:42:56.289444 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjbjt\" (UniqueName: \"kubernetes.io/projected/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-kube-api-access-hjbjt\") pod \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\" (UID: \"b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead\") " Dec 05 06:42:56 crc kubenswrapper[4652]: I1205 06:42:56.289497 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-host" (OuterVolumeSpecName: "host") pod "b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead" (UID: "b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:42:56 crc kubenswrapper[4652]: I1205 06:42:56.289997 4652 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-host\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:56 crc kubenswrapper[4652]: I1205 06:42:56.294144 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-kube-api-access-hjbjt" (OuterVolumeSpecName: "kube-api-access-hjbjt") pod "b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead" (UID: "b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead"). InnerVolumeSpecName "kube-api-access-hjbjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:42:56 crc kubenswrapper[4652]: I1205 06:42:56.392489 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjbjt\" (UniqueName: \"kubernetes.io/projected/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead-kube-api-access-hjbjt\") on node \"crc\" DevicePath \"\"" Dec 05 06:42:57 crc kubenswrapper[4652]: I1205 06:42:57.101187 4652 scope.go:117] "RemoveContainer" containerID="9374b0db66f60b02d56b7f6973ba860743b0e0523b5d5542e3783c66d80620e3" Dec 05 06:42:57 crc kubenswrapper[4652]: I1205 06:42:57.101239 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/crc-debug-rs5nn" Dec 05 06:42:58 crc kubenswrapper[4652]: I1205 06:42:58.134740 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead" path="/var/lib/kubelet/pods/b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead/volumes" Dec 05 06:43:16 crc kubenswrapper[4652]: I1205 06:43:16.242649 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6998b65996-4b5mf_8afcb8d2-22df-47a6-991c-c39a75b6834f/barbican-api/0.log" Dec 05 06:43:16 crc kubenswrapper[4652]: I1205 06:43:16.731286 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6998b65996-4b5mf_8afcb8d2-22df-47a6-991c-c39a75b6834f/barbican-api-log/0.log" Dec 05 06:43:16 crc kubenswrapper[4652]: I1205 06:43:16.738230 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5f5b548c68-xzqkg_7ff94817-36ec-4a5d-957e-ca4ccf1c3982/barbican-keystone-listener/0.log" Dec 05 06:43:16 crc kubenswrapper[4652]: I1205 06:43:16.812408 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5f5b548c68-xzqkg_7ff94817-36ec-4a5d-957e-ca4ccf1c3982/barbican-keystone-listener-log/0.log" Dec 05 06:43:16 crc kubenswrapper[4652]: I1205 06:43:16.904656 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-9d75f794c-grj2p_3acc1f50-b762-48ef-a1ee-dae3fa8bfe42/barbican-worker/0.log" Dec 05 06:43:16 crc kubenswrapper[4652]: I1205 06:43:16.935289 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-9d75f794c-grj2p_3acc1f50-b762-48ef-a1ee-dae3fa8bfe42/barbican-worker-log/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.109368 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-8k5rs_627ed016-e8a7-41f8-b474-60cf9b24e5ba/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.183636 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_714dfe3f-61b7-4a86-88bf-3eaf640a4437/ceilometer-central-agent/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.222497 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_714dfe3f-61b7-4a86-88bf-3eaf640a4437/ceilometer-notification-agent/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.287857 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_714dfe3f-61b7-4a86-88bf-3eaf640a4437/sg-core/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.322109 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_714dfe3f-61b7-4a86-88bf-3eaf640a4437/proxy-httpd/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: 
I1205 06:43:17.460055 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9657e858-3fdb-4e7d-973d-21d1a05592b2/cinder-api-log/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.544248 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_9657e858-3fdb-4e7d-973d-21d1a05592b2/cinder-api/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.553940 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b7bdd3b9-536b-4099-9f8c-753eb2725f8b/cinder-scheduler/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.650526 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b7bdd3b9-536b-4099-9f8c-753eb2725f8b/probe/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.711700 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-pws2q_411b62a2-a40e-44ab-adff-aa570ec06501/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.807127 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-vcvww_ea47dfbf-ed1f-4197-9c06-68290a722e2d/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.864122 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-rwqgd_4c98e2af-23aa-4018-bf3f-cd92dbb41ebb/init/0.log" Dec 05 06:43:17 crc kubenswrapper[4652]: I1205 06:43:17.997780 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-rwqgd_4c98e2af-23aa-4018-bf3f-cd92dbb41ebb/init/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.076501 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-p5nj9_91378bb9-4654-44bb-9162-220068b36036/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.148064 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-77b58f4b85-rwqgd_4c98e2af-23aa-4018-bf3f-cd92dbb41ebb/dnsmasq-dns/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.238694 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_eac02c10-37cb-4ddc-9b04-010c4fe70817/glance-httpd/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.242388 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_eac02c10-37cb-4ddc-9b04-010c4fe70817/glance-log/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.377256 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_58c5b24e-27a5-4bd8-adde-5bc3d97aca80/glance-httpd/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.388801 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_58c5b24e-27a5-4bd8-adde-5bc3d97aca80/glance-log/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.598197 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-c77c7b944-twjsn_a1465128-fcb6-49f8-8879-96e87d51b967/horizon/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.713309 4652 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-t8mq4_f53983a0-2e5c-43e3-87eb-e58ed8c7c082/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.838252 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-8282r_49a80212-3f70-4629-b85e-a7a677a9e72e/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:18 crc kubenswrapper[4652]: I1205 06:43:18.922328 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-c77c7b944-twjsn_a1465128-fcb6-49f8-8879-96e87d51b967/horizon-log/0.log" Dec 05 06:43:19 crc kubenswrapper[4652]: I1205 06:43:19.132035 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415241-4bkxc_35c364c2-b30a-448e-a393-ab53fa553df3/keystone-cron/0.log" Dec 05 06:43:19 crc kubenswrapper[4652]: I1205 06:43:19.225985 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_36542228-ef73-49ec-ae3e-471911ca4fcc/kube-state-metrics/0.log" Dec 05 06:43:19 crc kubenswrapper[4652]: I1205 06:43:19.235269 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-c7bb6b57-bpms4_38331fd0-fb06-4672-801b-f9aae8415645/keystone-api/0.log" Dec 05 06:43:19 crc kubenswrapper[4652]: I1205 06:43:19.346358 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-9q65b_03326baf-b566-4e1d-a8e6-07bb1c1535ad/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:19 crc kubenswrapper[4652]: I1205 06:43:19.642217 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-fw54c_8b54e322-d9fc-4a77-815b-461aef555fba/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:19 crc kubenswrapper[4652]: I1205 06:43:19.658736 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9f8cfd587-c4hb6_ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef/neutron-httpd/0.log" Dec 05 06:43:19 crc kubenswrapper[4652]: I1205 06:43:19.722691 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9f8cfd587-c4hb6_ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef/neutron-api/0.log" Dec 05 06:43:20 crc kubenswrapper[4652]: I1205 06:43:20.157480 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_4cbffe7e-c5f0-4e24-b01d-e79d5df6dceb/nova-cell0-conductor-conductor/0.log" Dec 05 06:43:20 crc kubenswrapper[4652]: I1205 06:43:20.374600 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d3e21f7d-50b3-48f5-8623-a85830778ab7/nova-cell1-conductor-conductor/0.log" Dec 05 06:43:20 crc kubenswrapper[4652]: I1205 06:43:20.619407 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_ed6932a0-b19b-405a-9266-f19d3c39ecae/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 06:43:20 crc kubenswrapper[4652]: I1205 06:43:20.687442 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_648b01a7-3843-4d88-b8a3-fcd6d0b19231/nova-api-log/0.log" Dec 05 06:43:20 crc kubenswrapper[4652]: I1205 06:43:20.757013 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-9c46b_965041ce-55b9-4c74-b51d-0628c7d13ac9/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:20 crc 
kubenswrapper[4652]: I1205 06:43:20.935782 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_648b01a7-3843-4d88-b8a3-fcd6d0b19231/nova-api-api/0.log" Dec 05 06:43:20 crc kubenswrapper[4652]: I1205 06:43:20.950942 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d5ed1fac-21dc-4868-aea0-114c5430b87a/nova-metadata-log/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.222083 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b12802a1-fea7-4427-9a10-7c6b2ac6c5bc/mysql-bootstrap/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.331898 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_9602531b-42f8-4992-b338-f184b39ae600/nova-scheduler-scheduler/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.353111 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b12802a1-fea7-4427-9a10-7c6b2ac6c5bc/mysql-bootstrap/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.436284 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b12802a1-fea7-4427-9a10-7c6b2ac6c5bc/galera/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.565120 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_d16da2b5-fe11-4ded-9722-94f4ddb2c8e1/mysql-bootstrap/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.693120 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_d16da2b5-fe11-4ded-9722-94f4ddb2c8e1/galera/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.701664 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_d16da2b5-fe11-4ded-9722-94f4ddb2c8e1/mysql-bootstrap/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.889541 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-9ddrq_bc55c237-0d7b-419f-9b97-966d6b918bda/ovn-controller/0.log" Dec 05 06:43:21 crc kubenswrapper[4652]: I1205 06:43:21.916304 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_7ee40e5d-1fe1-4d60-ac89-85beb2755efa/openstackclient/0.log" Dec 05 06:43:22 crc kubenswrapper[4652]: I1205 06:43:22.104852 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-xthfp_5e2a9269-04a5-4673-a350-9c491689231c/openstack-network-exporter/0.log" Dec 05 06:43:22 crc kubenswrapper[4652]: I1205 06:43:22.277226 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-kpg54_157a2e37-1f93-4c7b-817c-ac64edce5a2f/ovsdb-server-init/0.log" Dec 05 06:43:22 crc kubenswrapper[4652]: I1205 06:43:22.319816 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d5ed1fac-21dc-4868-aea0-114c5430b87a/nova-metadata-metadata/0.log" Dec 05 06:43:22 crc kubenswrapper[4652]: I1205 06:43:22.380082 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-kpg54_157a2e37-1f93-4c7b-817c-ac64edce5a2f/ovsdb-server-init/0.log" Dec 05 06:43:22 crc kubenswrapper[4652]: I1205 06:43:22.451906 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-kpg54_157a2e37-1f93-4c7b-817c-ac64edce5a2f/ovsdb-server/0.log" Dec 05 06:43:22 crc kubenswrapper[4652]: I1205 06:43:22.566653 4652 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-5qc56_499e9d22-ac03-4546-95bb-490b880a35fa/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:22 crc kubenswrapper[4652]: I1205 06:43:22.705730 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-kpg54_157a2e37-1f93-4c7b-817c-ac64edce5a2f/ovs-vswitchd/0.log" Dec 05 06:43:22 crc kubenswrapper[4652]: I1205 06:43:22.795904 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_38d81e02-71a7-4093-b84c-135254187f85/memcached/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.130069 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7425818b-e1fe-4aab-b8db-5c071afb7c9e/openstack-network-exporter/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.142780 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6/openstack-network-exporter/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.150691 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_7425818b-e1fe-4aab-b8db-5c071afb7c9e/ovn-northd/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.157001 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c7a2cdca-ccd6-484e-a46d-fb1824dc5ac6/ovsdbserver-nb/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.278664 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_79ec8106-75f2-456c-91ae-bf0f71304e9b/openstack-network-exporter/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.286309 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_79ec8106-75f2-456c-91ae-bf0f71304e9b/ovsdbserver-sb/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.425784 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-75cb4c4d5b-gg8r8_94817a12-145d-4719-b525-0905f7fdf28c/placement-api/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.468004 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_64c75413-ccc5-4f2b-8ce7-e3891c408fc8/init-config-reloader/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.513450 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-75cb4c4d5b-gg8r8_94817a12-145d-4719-b525-0905f7fdf28c/placement-log/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.603161 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_64c75413-ccc5-4f2b-8ce7-e3891c408fc8/config-reloader/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.626034 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_64c75413-ccc5-4f2b-8ce7-e3891c408fc8/prometheus/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.642323 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_64c75413-ccc5-4f2b-8ce7-e3891c408fc8/init-config-reloader/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.668013 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_64c75413-ccc5-4f2b-8ce7-e3891c408fc8/thanos-sidecar/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.744856 4652 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f3a8fdb1-ae46-469f-9d70-a1947c935abd/setup-container/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.865695 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f3a8fdb1-ae46-469f-9d70-a1947c935abd/setup-container/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.885095 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_f3a8fdb1-ae46-469f-9d70-a1947c935abd/rabbitmq/0.log" Dec 05 06:43:23 crc kubenswrapper[4652]: I1205 06:43:23.892093 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_86f82531-5219-4cd8-9432-1e8dc2a73b08/setup-container/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.020379 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_86f82531-5219-4cd8-9432-1e8dc2a73b08/setup-container/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.021272 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8/setup-container/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.024528 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_86f82531-5219-4cd8-9432-1e8dc2a73b08/rabbitmq/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.537802 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8/setup-container/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.539149 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ad23a29e-807c-4ec3-a29c-2ef1cfa1acd8/rabbitmq/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.575021 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-zqnf5_fb384ad5-89e0-44e5-b4db-09d13e563453/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.692120 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-jtzrx_81e62c63-5cd1-4cac-b717-f37452b33ebe/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.716427 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-bzhfv_d5d1eb7f-02cc-42c2-a956-61c2883ae88c/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.755066 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-7zwcg_c96e3166-ea9f-4421-b31f-dec147d6b7c9/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.841638 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-xrfzl_5878bd6d-36a3-44f1-9238-3c1160202f82/ssh-known-hosts-edpm-deployment/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.925824 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65dffd4ccf-cqtxw_92728b29-e7ee-4aa6-b072-10c3abc0e22a/proxy-server/0.log" Dec 05 06:43:24 crc kubenswrapper[4652]: I1205 06:43:24.999947 4652 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65dffd4ccf-cqtxw_92728b29-e7ee-4aa6-b072-10c3abc0e22a/proxy-httpd/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.063270 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-hg67q_d882dd0e-1ba2-4b38-a548-9d47833aa687/swift-ring-rebalance/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.107942 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/account-auditor/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.178276 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/account-reaper/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.202517 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/account-replicator/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.217098 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/account-server/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.236698 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/container-auditor/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.284410 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/container-replicator/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.340750 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/container-server/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.349594 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/container-updater/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.374136 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/object-auditor/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.390132 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/object-expirer/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.449533 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/object-replicator/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.473833 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/object-server/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.484957 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/object-updater/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.534883 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/swift-recon-cron/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.534895 4652 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_f145cea0-a420-4b52-95bb-83042cd8d09b/rsync/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.644857 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-kcxkc_1f297a34-ee2c-4c09-a6fb-00ef6ffa8c3c/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.892438 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_ed44c043-cf02-4901-ab0a-a86753eef277/test-operator-logs-container/0.log" Dec 05 06:43:25 crc kubenswrapper[4652]: I1205 06:43:25.973655 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-st6kl_1e9468f7-9fcc-48bd-9745-af6990cb4091/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 06:43:26 crc kubenswrapper[4652]: I1205 06:43:26.230634 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_a28f4971-2ed4-40ee-a1a9-9c8e72be0cb2/tempest-tests-tempest-tests-runner/0.log" Dec 05 06:43:26 crc kubenswrapper[4652]: I1205 06:43:26.665660 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_e7ae16a1-df00-4d9b-bd3e-16d2d81946d4/watcher-applier/0.log" Dec 05 06:43:27 crc kubenswrapper[4652]: I1205 06:43:27.067618 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8/watcher-api-log/0.log" Dec 05 06:43:28 crc kubenswrapper[4652]: I1205 06:43:28.411010 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_077f273d-5e79-49f7-a780-059832c13655/watcher-decision-engine/0.log" Dec 05 06:43:29 crc kubenswrapper[4652]: I1205 06:43:29.033730 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_bd285ac2-1d04-4d94-b1ac-bc39f2ebb4d8/watcher-api/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.226563 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl_50ba4cc5-ef20-4d56-83b7-8c5eefd7a915/util/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.361695 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl_50ba4cc5-ef20-4d56-83b7-8c5eefd7a915/util/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.380420 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl_50ba4cc5-ef20-4d56-83b7-8c5eefd7a915/pull/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.399081 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl_50ba4cc5-ef20-4d56-83b7-8c5eefd7a915/pull/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.496824 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl_50ba4cc5-ef20-4d56-83b7-8c5eefd7a915/util/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.525066 4652 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl_50ba4cc5-ef20-4d56-83b7-8c5eefd7a915/extract/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.527667 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_917aae072417a6c2fc5ddd97ca05bfedb9fc1cad89a3b1c4d989b78eaf7ckxl_50ba4cc5-ef20-4d56-83b7-8c5eefd7a915/pull/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.648143 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-txx52_b13a9032-c937-442f-b305-c3b3d3fad395/kube-rbac-proxy/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.724797 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-txx52_b13a9032-c937-442f-b305-c3b3d3fad395/manager/0.log" Dec 05 06:43:47 crc kubenswrapper[4652]: I1205 06:43:47.724874 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-tnlt9_e929a962-27ca-476f-9800-5bbd1f57a1d6/kube-rbac-proxy/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.340250 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-tnlt9_e929a962-27ca-476f-9800-5bbd1f57a1d6/manager/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.351973 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-lmtl4_b25da2c0-4af8-4cf9-9c4d-1b15054e9b40/kube-rbac-proxy/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.372603 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-lmtl4_b25da2c0-4af8-4cf9-9c4d-1b15054e9b40/manager/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.505965 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-8qfkt_bdb26248-b4db-48e7-8b0a-ecd525fae23e/kube-rbac-proxy/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.547212 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-8qfkt_bdb26248-b4db-48e7-8b0a-ecd525fae23e/manager/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.670377 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-m26j4_9e53a822-217b-4037-b378-30ad7d875afd/kube-rbac-proxy/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.696310 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-m26j4_9e53a822-217b-4037-b378-30ad7d875afd/manager/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.730167 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-57wt5_92782842-40da-49c5-a384-383efcfd71e1/kube-rbac-proxy/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.820527 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-57wt5_92782842-40da-49c5-a384-383efcfd71e1/manager/0.log" Dec 05 06:43:48 crc kubenswrapper[4652]: I1205 06:43:48.862838 4652 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-2qcwp_4bfee3f2-6683-4de8-9b17-765b4180603a/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.038108 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-hk2qt_a74a4099-4a13-4a2c-bf5e-a9a18187ccfa/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.046483 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-2qcwp_4bfee3f2-6683-4de8-9b17-765b4180603a/manager/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.091059 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-hk2qt_a74a4099-4a13-4a2c-bf5e-a9a18187ccfa/manager/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.201454 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b4ff5_3cc8a91f-1019-4d91-89fa-46eca439c2b3/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.264890 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-b4ff5_3cc8a91f-1019-4d91-89fa-46eca439c2b3/manager/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.391540 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-dt6ws_aad4570c-af5e-4ef6-b985-87eeab6d86be/manager/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.397780 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-dt6ws_aad4570c-af5e-4ef6-b985-87eeab6d86be/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.477801 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-jzvpg_c09fa74b-6157-478a-81ca-ef38b9ac40bf/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.504735 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-jzvpg_c09fa74b-6157-478a-81ca-ef38b9ac40bf/manager/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.571370 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-bn7wf_e270d457-fc48-4d2c-ab72-328d8832260c/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.636819 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-bn7wf_e270d457-fc48-4d2c-ab72-328d8832260c/manager/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.656600 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-d28kq_15d36563-8ee0-4701-8446-0fddc3b64d7a/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.756376 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-d28kq_15d36563-8ee0-4701-8446-0fddc3b64d7a/manager/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 
06:43:49.816902 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-ds46q_83410e3b-8d96-4dbd-8392-72418cac098b/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.819986 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-ds46q_83410e3b-8d96-4dbd-8392-72418cac098b/manager/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.911781 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55c85496f58g8cr_55dd49af-10d4-4392-99a1-fb7ae4f3f9d0/kube-rbac-proxy/0.log" Dec 05 06:43:49 crc kubenswrapper[4652]: I1205 06:43:49.982340 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-55c85496f58g8cr_55dd49af-10d4-4392-99a1-fb7ae4f3f9d0/manager/0.log" Dec 05 06:43:50 crc kubenswrapper[4652]: I1205 06:43:50.428179 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-5ghq4_fa35b992-e6b3-49d5-93d0-be938e9a9119/registry-server/0.log" Dec 05 06:43:50 crc kubenswrapper[4652]: I1205 06:43:50.435442 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-55b6fb9447-xrwdp_bd458eaf-351c-46ec-950e-e02dec9040b9/operator/0.log" Dec 05 06:43:50 crc kubenswrapper[4652]: I1205 06:43:50.469509 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pspnk_5c49de75-9faf-4730-ae6f-29bc4fb36554/kube-rbac-proxy/0.log" Dec 05 06:43:50 crc kubenswrapper[4652]: I1205 06:43:50.622044 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-t5drn_3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5/kube-rbac-proxy/0.log" Dec 05 06:43:50 crc kubenswrapper[4652]: I1205 06:43:50.640224 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-pspnk_5c49de75-9faf-4730-ae6f-29bc4fb36554/manager/0.log" Dec 05 06:43:50 crc kubenswrapper[4652]: I1205 06:43:50.718352 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-t5drn_3d7b292f-a7df-4f2f-a770-ae5ae5ee89e5/manager/0.log" Dec 05 06:43:50 crc kubenswrapper[4652]: I1205 06:43:50.822574 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-6lkb4_22a72ff7-7ae3-45ae-bdb4-13db7f13bf9e/operator/0.log" Dec 05 06:43:50 crc kubenswrapper[4652]: I1205 06:43:50.925675 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-ggrv8_1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48/kube-rbac-proxy/0.log" Dec 05 06:43:51 crc kubenswrapper[4652]: I1205 06:43:51.056592 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-ggrv8_1492e6c1-9ca5-43af-8c9b-dbcf4f3b6c48/manager/0.log" Dec 05 06:43:51 crc kubenswrapper[4652]: I1205 06:43:51.145784 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-pcfm9_335d8f40-d377-44e9-93ba-e2ec0a5aa37e/kube-rbac-proxy/0.log" Dec 05 06:43:51 crc 
kubenswrapper[4652]: I1205 06:43:51.246109 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54bdf956c4-5qx5b_2953519b-8365-4aa7-904e-a1b1c8ee525a/manager/0.log" Dec 05 06:43:51 crc kubenswrapper[4652]: I1205 06:43:51.276680 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-pcfm9_335d8f40-d377-44e9-93ba-e2ec0a5aa37e/manager/0.log" Dec 05 06:43:51 crc kubenswrapper[4652]: I1205 06:43:51.299492 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-x6pt9_220bfae2-7df4-4019-bf69-df29df3cecd9/kube-rbac-proxy/0.log" Dec 05 06:43:51 crc kubenswrapper[4652]: I1205 06:43:51.333848 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-x6pt9_220bfae2-7df4-4019-bf69-df29df3cecd9/manager/0.log" Dec 05 06:43:51 crc kubenswrapper[4652]: I1205 06:43:51.442125 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-kggvr_3c5d5a42-8c45-453b-87c5-46a78fcad90c/kube-rbac-proxy/0.log" Dec 05 06:43:51 crc kubenswrapper[4652]: I1205 06:43:51.512364 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-kggvr_3c5d5a42-8c45-453b-87c5-46a78fcad90c/manager/0.log" Dec 05 06:44:06 crc kubenswrapper[4652]: I1205 06:44:06.023682 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-fc94g_cdcd10ca-d646-4d7d-ac1f-bc7b5ba8d3f6/control-plane-machine-set-operator/0.log" Dec 05 06:44:06 crc kubenswrapper[4652]: I1205 06:44:06.160517 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6cfcs_3541b892-0b8c-4319-b4fa-b4d34cca9e18/kube-rbac-proxy/0.log" Dec 05 06:44:06 crc kubenswrapper[4652]: I1205 06:44:06.164055 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6cfcs_3541b892-0b8c-4319-b4fa-b4d34cca9e18/machine-api-operator/0.log" Dec 05 06:44:16 crc kubenswrapper[4652]: I1205 06:44:16.000752 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-bvrgw_df2f0784-f7cb-40ec-8d8b-4ff0b75578aa/cert-manager-controller/0.log" Dec 05 06:44:16 crc kubenswrapper[4652]: I1205 06:44:16.090215 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-dttvn_2b9b3dfa-9a5c-4267-9198-ebd356b60ced/cert-manager-cainjector/0.log" Dec 05 06:44:16 crc kubenswrapper[4652]: I1205 06:44:16.106802 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-g6642_da9536a9-4fcd-43a8-ad83-88903d8a043e/cert-manager-webhook/0.log" Dec 05 06:44:24 crc kubenswrapper[4652]: I1205 06:44:24.535492 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-tq6cx_ecb29914-d051-4ef2-ae2a-3152e9523d7e/nmstate-console-plugin/0.log" Dec 05 06:44:24 crc kubenswrapper[4652]: I1205 06:44:24.679857 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-m7ckb_fbc4582c-3b3c-4a43-b80b-d944ec9236db/nmstate-handler/0.log" Dec 05 06:44:24 crc kubenswrapper[4652]: I1205 06:44:24.717042 
4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-8fss9_365195ea-8d24-4d70-950d-73fd7bb88e0e/kube-rbac-proxy/0.log" Dec 05 06:44:24 crc kubenswrapper[4652]: I1205 06:44:24.758028 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-8fss9_365195ea-8d24-4d70-950d-73fd7bb88e0e/nmstate-metrics/0.log" Dec 05 06:44:24 crc kubenswrapper[4652]: I1205 06:44:24.843715 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-6tkg4_44a02e83-b669-4ebd-824d-c392001131a0/nmstate-operator/0.log" Dec 05 06:44:24 crc kubenswrapper[4652]: I1205 06:44:24.896374 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-wlk99_51d7fcc3-f6cf-48b7-a948-471a58961770/nmstate-webhook/0.log" Dec 05 06:44:34 crc kubenswrapper[4652]: I1205 06:44:34.149972 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:44:34 crc kubenswrapper[4652]: I1205 06:44:34.150377 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:44:35 crc kubenswrapper[4652]: I1205 06:44:35.441831 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-zjbfw_410bec27-9b9a-41dd-b45e-a2c9edcba338/kube-rbac-proxy/0.log" Dec 05 06:44:35 crc kubenswrapper[4652]: I1205 06:44:35.565075 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-zjbfw_410bec27-9b9a-41dd-b45e-a2c9edcba338/controller/0.log" Dec 05 06:44:35 crc kubenswrapper[4652]: I1205 06:44:35.599295 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-fmrxv_ca3b9554-659a-435e-9349-4284c9130a23/frr-k8s-webhook-server/0.log" Dec 05 06:44:35 crc kubenswrapper[4652]: I1205 06:44:35.724198 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-frr-files/0.log" Dec 05 06:44:35 crc kubenswrapper[4652]: I1205 06:44:35.846067 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-reloader/0.log" Dec 05 06:44:35 crc kubenswrapper[4652]: I1205 06:44:35.849313 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-reloader/0.log" Dec 05 06:44:35 crc kubenswrapper[4652]: I1205 06:44:35.882928 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-frr-files/0.log" Dec 05 06:44:35 crc kubenswrapper[4652]: I1205 06:44:35.889350 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-metrics/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.034969 4652 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-reloader/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.051136 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-frr-files/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.076481 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-metrics/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.087854 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-metrics/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.207516 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-metrics/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.213544 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-frr-files/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.226603 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/cp-reloader/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.243887 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/controller/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.354912 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/frr-metrics/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.388830 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/kube-rbac-proxy/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.401220 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/kube-rbac-proxy-frr/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.525588 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/reloader/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.612294 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-75747b5f89-glsxz_3186078d-155e-46dd-b2ea-7f0802181e82/manager/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.775966 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5bf4c9cd7b-w67kv_eb813616-370d-43a9-9b81-cd9c93f6dc06/webhook-server/0.log" Dec 05 06:44:36 crc kubenswrapper[4652]: I1205 06:44:36.970356 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w6w66_88cc55da-6280-437e-b2ab-932afe3de7aa/kube-rbac-proxy/0.log" Dec 05 06:44:37 crc kubenswrapper[4652]: I1205 06:44:37.364621 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w6w66_88cc55da-6280-437e-b2ab-932afe3de7aa/speaker/0.log" Dec 05 06:44:37 crc kubenswrapper[4652]: I1205 06:44:37.752793 4652 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-zs7fz_210880ed-8b13-4c81-ae22-68fd50621fda/frr/0.log" Dec 05 06:44:45 crc kubenswrapper[4652]: I1205 06:44:45.816627 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4_e5af0d49-ade4-4624-9dab-984dab1bfcca/util/0.log" Dec 05 06:44:45 crc kubenswrapper[4652]: I1205 06:44:45.961167 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4_e5af0d49-ade4-4624-9dab-984dab1bfcca/pull/0.log" Dec 05 06:44:45 crc kubenswrapper[4652]: I1205 06:44:45.972545 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4_e5af0d49-ade4-4624-9dab-984dab1bfcca/pull/0.log" Dec 05 06:44:45 crc kubenswrapper[4652]: I1205 06:44:45.982765 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4_e5af0d49-ade4-4624-9dab-984dab1bfcca/util/0.log" Dec 05 06:44:46 crc kubenswrapper[4652]: I1205 06:44:46.104196 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4_e5af0d49-ade4-4624-9dab-984dab1bfcca/util/0.log" Dec 05 06:44:46 crc kubenswrapper[4652]: I1205 06:44:46.113859 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4_e5af0d49-ade4-4624-9dab-984dab1bfcca/extract/0.log" Dec 05 06:44:46 crc kubenswrapper[4652]: I1205 06:44:46.118379 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff54w4_e5af0d49-ade4-4624-9dab-984dab1bfcca/pull/0.log" Dec 05 06:44:46 crc kubenswrapper[4652]: I1205 06:44:46.225036 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_d005ccd7-aab2-4b15-a71d-fda77a89070b/util/0.log" Dec 05 06:44:46 crc kubenswrapper[4652]: I1205 06:44:46.823537 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_d005ccd7-aab2-4b15-a71d-fda77a89070b/pull/0.log" Dec 05 06:44:46 crc kubenswrapper[4652]: I1205 06:44:46.825164 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_d005ccd7-aab2-4b15-a71d-fda77a89070b/pull/0.log" Dec 05 06:44:46 crc kubenswrapper[4652]: I1205 06:44:46.848617 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_d005ccd7-aab2-4b15-a71d-fda77a89070b/util/0.log" Dec 05 06:44:46 crc kubenswrapper[4652]: I1205 06:44:46.956158 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_d005ccd7-aab2-4b15-a71d-fda77a89070b/util/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.003895 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_d005ccd7-aab2-4b15-a71d-fda77a89070b/pull/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 
06:44:47.004017 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210k6468_d005ccd7-aab2-4b15-a71d-fda77a89070b/extract/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.090428 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz_50613739-37b4-41ab-b9b1-73afc91d1fad/util/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.211109 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz_50613739-37b4-41ab-b9b1-73afc91d1fad/pull/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.214583 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz_50613739-37b4-41ab-b9b1-73afc91d1fad/util/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.245667 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz_50613739-37b4-41ab-b9b1-73afc91d1fad/pull/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.345827 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz_50613739-37b4-41ab-b9b1-73afc91d1fad/util/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.353001 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz_50613739-37b4-41ab-b9b1-73afc91d1fad/pull/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.375532 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83t7fdz_50613739-37b4-41ab-b9b1-73afc91d1fad/extract/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.468018 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-txw9m_712da180-0e95-46d1-ae94-66811f03cf96/extract-utilities/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.604317 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-txw9m_712da180-0e95-46d1-ae94-66811f03cf96/extract-utilities/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.608350 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-txw9m_712da180-0e95-46d1-ae94-66811f03cf96/extract-content/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.633880 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-txw9m_712da180-0e95-46d1-ae94-66811f03cf96/extract-content/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.758861 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-txw9m_712da180-0e95-46d1-ae94-66811f03cf96/extract-content/0.log" Dec 05 06:44:47 crc kubenswrapper[4652]: I1205 06:44:47.763785 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-txw9m_712da180-0e95-46d1-ae94-66811f03cf96/extract-utilities/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.227512 4652 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-txw9m_712da180-0e95-46d1-ae94-66811f03cf96/registry-server/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.254675 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qgdwf_3b14b958-34f5-4254-8dc0-9666617dfd56/extract-utilities/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.390166 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qgdwf_3b14b958-34f5-4254-8dc0-9666617dfd56/extract-utilities/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.399880 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qgdwf_3b14b958-34f5-4254-8dc0-9666617dfd56/extract-content/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.413072 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qgdwf_3b14b958-34f5-4254-8dc0-9666617dfd56/extract-content/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.543790 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qgdwf_3b14b958-34f5-4254-8dc0-9666617dfd56/extract-utilities/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.552662 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qgdwf_3b14b958-34f5-4254-8dc0-9666617dfd56/extract-content/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.569404 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/3.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.708486 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mtc94_befe0b4e-bc8f-4a52-8485-aa822dc69415/marketplace-operator/2.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.805641 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xz9mv_91c1ac30-0a51-4501-94ce-53183bf948cf/extract-utilities/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.816452 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qgdwf_3b14b958-34f5-4254-8dc0-9666617dfd56/registry-server/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.898326 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xz9mv_91c1ac30-0a51-4501-94ce-53183bf948cf/extract-utilities/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.921019 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xz9mv_91c1ac30-0a51-4501-94ce-53183bf948cf/extract-content/0.log" Dec 05 06:44:48 crc kubenswrapper[4652]: I1205 06:44:48.930922 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xz9mv_91c1ac30-0a51-4501-94ce-53183bf948cf/extract-content/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.073730 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-84w6t_c6bd24fa-eaf3-430a-bfae-7230e901b63d/extract-utilities/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.074876 4652 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xz9mv_91c1ac30-0a51-4501-94ce-53183bf948cf/extract-content/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.078097 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xz9mv_91c1ac30-0a51-4501-94ce-53183bf948cf/extract-utilities/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.211018 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xz9mv_91c1ac30-0a51-4501-94ce-53183bf948cf/registry-server/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.275030 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-84w6t_c6bd24fa-eaf3-430a-bfae-7230e901b63d/extract-content/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.280953 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-84w6t_c6bd24fa-eaf3-430a-bfae-7230e901b63d/extract-content/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.291115 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-84w6t_c6bd24fa-eaf3-430a-bfae-7230e901b63d/extract-utilities/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.409631 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-84w6t_c6bd24fa-eaf3-430a-bfae-7230e901b63d/extract-content/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.427602 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-84w6t_c6bd24fa-eaf3-430a-bfae-7230e901b63d/extract-utilities/0.log" Dec 05 06:44:49 crc kubenswrapper[4652]: I1205 06:44:49.950688 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-84w6t_c6bd24fa-eaf3-430a-bfae-7230e901b63d/registry-server/0.log" Dec 05 06:44:58 crc kubenswrapper[4652]: I1205 06:44:58.305710 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-rnzbt_3a6c3b19-b3a2-47a0-b6e6-0ac9b2a6a48a/prometheus-operator/0.log" Dec 05 06:44:58 crc kubenswrapper[4652]: I1205 06:44:58.403743 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5ffcbcff99-7zdq6_d9eec5b9-7356-430d-b54a-53451f7eeeb6/prometheus-operator-admission-webhook/0.log" Dec 05 06:44:58 crc kubenswrapper[4652]: I1205 06:44:58.458219 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5ffcbcff99-lswpr_720842c0-3317-47df-a8b0-99380921e48d/prometheus-operator-admission-webhook/0.log" Dec 05 06:44:58 crc kubenswrapper[4652]: I1205 06:44:58.539008 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-7lgmr_4c7f2023-f868-4f76-a833-44988a07307d/operator/0.log" Dec 05 06:44:58 crc kubenswrapper[4652]: I1205 06:44:58.605902 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-hgff6_b8cbd621-1925-4df5-8a63-78c0cc735339/perses-operator/0.log" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.144279 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89"] Dec 05 06:45:00 crc 
kubenswrapper[4652]: E1205 06:45:00.144918 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead" containerName="container-00" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.144932 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead" containerName="container-00" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.145181 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1b843ff-4c0a-4a5f-a47e-5e1d7cc75ead" containerName="container-00" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.145839 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.147717 4652 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.147746 4652 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.162155 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89"] Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.224698 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq54h\" (UniqueName: \"kubernetes.io/projected/cb52b1bb-39cf-45c9-98db-47579f56e584-kube-api-access-nq54h\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.224828 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb52b1bb-39cf-45c9-98db-47579f56e584-secret-volume\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.224862 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb52b1bb-39cf-45c9-98db-47579f56e584-config-volume\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.326831 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb52b1bb-39cf-45c9-98db-47579f56e584-secret-volume\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.326886 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb52b1bb-39cf-45c9-98db-47579f56e584-config-volume\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 
06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.327136 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq54h\" (UniqueName: \"kubernetes.io/projected/cb52b1bb-39cf-45c9-98db-47579f56e584-kube-api-access-nq54h\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.327793 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb52b1bb-39cf-45c9-98db-47579f56e584-config-volume\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.331816 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb52b1bb-39cf-45c9-98db-47579f56e584-secret-volume\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.345154 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq54h\" (UniqueName: \"kubernetes.io/projected/cb52b1bb-39cf-45c9-98db-47579f56e584-kube-api-access-nq54h\") pod \"collect-profiles-29415285-q6r89\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.464181 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:00 crc kubenswrapper[4652]: W1205 06:45:00.856819 4652 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb52b1bb_39cf_45c9_98db_47579f56e584.slice/crio-1f5d1b55ff65f7afd402a81680e9530cfa5ee329de485a077184a90330b11576 WatchSource:0}: Error finding container 1f5d1b55ff65f7afd402a81680e9530cfa5ee329de485a077184a90330b11576: Status 404 returned error can't find the container with id 1f5d1b55ff65f7afd402a81680e9530cfa5ee329de485a077184a90330b11576 Dec 05 06:45:00 crc kubenswrapper[4652]: I1205 06:45:00.857979 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89"] Dec 05 06:45:01 crc kubenswrapper[4652]: I1205 06:45:01.049652 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" event={"ID":"cb52b1bb-39cf-45c9-98db-47579f56e584","Type":"ContainerStarted","Data":"d458e10d86f419cbb8194b1386a6f2c6d1957df26686574a5fa65932b2b56911"} Dec 05 06:45:01 crc kubenswrapper[4652]: I1205 06:45:01.049879 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" event={"ID":"cb52b1bb-39cf-45c9-98db-47579f56e584","Type":"ContainerStarted","Data":"1f5d1b55ff65f7afd402a81680e9530cfa5ee329de485a077184a90330b11576"} Dec 05 06:45:01 crc kubenswrapper[4652]: I1205 06:45:01.065800 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" podStartSLOduration=1.065785387 podStartE2EDuration="1.065785387s" podCreationTimestamp="2025-12-05 06:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:45:01.059696088 +0000 UTC m=+4703.296426355" watchObservedRunningTime="2025-12-05 06:45:01.065785387 +0000 UTC m=+4703.302515654" Dec 05 06:45:02 crc kubenswrapper[4652]: I1205 06:45:02.063107 4652 generic.go:334] "Generic (PLEG): container finished" podID="cb52b1bb-39cf-45c9-98db-47579f56e584" containerID="d458e10d86f419cbb8194b1386a6f2c6d1957df26686574a5fa65932b2b56911" exitCode=0 Dec 05 06:45:02 crc kubenswrapper[4652]: I1205 06:45:02.063304 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" event={"ID":"cb52b1bb-39cf-45c9-98db-47579f56e584","Type":"ContainerDied","Data":"d458e10d86f419cbb8194b1386a6f2c6d1957df26686574a5fa65932b2b56911"} Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.588007 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.690495 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nq54h\" (UniqueName: \"kubernetes.io/projected/cb52b1bb-39cf-45c9-98db-47579f56e584-kube-api-access-nq54h\") pod \"cb52b1bb-39cf-45c9-98db-47579f56e584\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.690576 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb52b1bb-39cf-45c9-98db-47579f56e584-config-volume\") pod \"cb52b1bb-39cf-45c9-98db-47579f56e584\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.690656 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb52b1bb-39cf-45c9-98db-47579f56e584-secret-volume\") pod \"cb52b1bb-39cf-45c9-98db-47579f56e584\" (UID: \"cb52b1bb-39cf-45c9-98db-47579f56e584\") " Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.691934 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb52b1bb-39cf-45c9-98db-47579f56e584-config-volume" (OuterVolumeSpecName: "config-volume") pod "cb52b1bb-39cf-45c9-98db-47579f56e584" (UID: "cb52b1bb-39cf-45c9-98db-47579f56e584"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.695973 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb52b1bb-39cf-45c9-98db-47579f56e584-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cb52b1bb-39cf-45c9-98db-47579f56e584" (UID: "cb52b1bb-39cf-45c9-98db-47579f56e584"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.696145 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb52b1bb-39cf-45c9-98db-47579f56e584-kube-api-access-nq54h" (OuterVolumeSpecName: "kube-api-access-nq54h") pod "cb52b1bb-39cf-45c9-98db-47579f56e584" (UID: "cb52b1bb-39cf-45c9-98db-47579f56e584"). InnerVolumeSpecName "kube-api-access-nq54h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.793402 4652 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb52b1bb-39cf-45c9-98db-47579f56e584-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.793431 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nq54h\" (UniqueName: \"kubernetes.io/projected/cb52b1bb-39cf-45c9-98db-47579f56e584-kube-api-access-nq54h\") on node \"crc\" DevicePath \"\"" Dec 05 06:45:03 crc kubenswrapper[4652]: I1205 06:45:03.793444 4652 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb52b1bb-39cf-45c9-98db-47579f56e584-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:45:04 crc kubenswrapper[4652]: I1205 06:45:04.080471 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" event={"ID":"cb52b1bb-39cf-45c9-98db-47579f56e584","Type":"ContainerDied","Data":"1f5d1b55ff65f7afd402a81680e9530cfa5ee329de485a077184a90330b11576"} Dec 05 06:45:04 crc kubenswrapper[4652]: I1205 06:45:04.080505 4652 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f5d1b55ff65f7afd402a81680e9530cfa5ee329de485a077184a90330b11576" Dec 05 06:45:04 crc kubenswrapper[4652]: I1205 06:45:04.080541 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-q6r89" Dec 05 06:45:04 crc kubenswrapper[4652]: I1205 06:45:04.150510 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:45:04 crc kubenswrapper[4652]: I1205 06:45:04.150602 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:45:04 crc kubenswrapper[4652]: I1205 06:45:04.659164 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn"] Dec 05 06:45:04 crc kubenswrapper[4652]: I1205 06:45:04.666170 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-j6vcn"] Dec 05 06:45:06 crc kubenswrapper[4652]: I1205 06:45:06.134834 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1c2f789-04c2-4616-a921-81d0d5a9c6bb" path="/var/lib/kubelet/pods/f1c2f789-04c2-4616-a921-81d0d5a9c6bb/volumes" Dec 05 06:45:10 crc kubenswrapper[4652]: E1205 06:45:10.622578 4652 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.25.93:55856->192.168.25.93:37713: read tcp 192.168.25.93:55856->192.168.25.93:37713: read: connection reset by peer Dec 05 06:45:34 crc kubenswrapper[4652]: I1205 06:45:34.150341 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:45:34 crc kubenswrapper[4652]: I1205 06:45:34.150761 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:45:34 crc kubenswrapper[4652]: I1205 06:45:34.150798 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 06:45:34 crc kubenswrapper[4652]: I1205 06:45:34.151234 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"beb45db72cad990812bece4f42e33b4e0ed1610f1d17c0ba8eb299a6992d0374"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:45:34 crc kubenswrapper[4652]: I1205 06:45:34.151272 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://beb45db72cad990812bece4f42e33b4e0ed1610f1d17c0ba8eb299a6992d0374" gracePeriod=600 Dec 05 06:45:34 crc kubenswrapper[4652]: I1205 06:45:34.361955 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="beb45db72cad990812bece4f42e33b4e0ed1610f1d17c0ba8eb299a6992d0374" exitCode=0 Dec 05 06:45:34 crc kubenswrapper[4652]: I1205 06:45:34.362023 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"beb45db72cad990812bece4f42e33b4e0ed1610f1d17c0ba8eb299a6992d0374"} Dec 05 06:45:34 crc kubenswrapper[4652]: I1205 06:45:34.362056 4652 scope.go:117] "RemoveContainer" containerID="e13024a795126ee28fbda000318abbc46b660bfdab427c3fda37b5ae2babd66f" Dec 05 06:45:35 crc kubenswrapper[4652]: I1205 06:45:35.373519 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerStarted","Data":"3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8"} Dec 05 06:45:42 crc kubenswrapper[4652]: I1205 06:45:42.242586 4652 scope.go:117] "RemoveContainer" containerID="fe2f64774b90a19979247133f90428aa071bd48983363f35cf77bd77b37102d7" Dec 05 06:46:30 crc kubenswrapper[4652]: I1205 06:46:30.811280 4652 generic.go:334] "Generic (PLEG): container finished" podID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerID="0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47" exitCode=0 Dec 05 06:46:30 crc kubenswrapper[4652]: I1205 06:46:30.811601 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jzx4s/must-gather-42phd" event={"ID":"71887dbc-5af0-4607-acfd-ee6e12e90d68","Type":"ContainerDied","Data":"0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47"} Dec 05 06:46:30 crc kubenswrapper[4652]: I1205 06:46:30.812098 4652 scope.go:117] "RemoveContainer" 
containerID="0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47" Dec 05 06:46:31 crc kubenswrapper[4652]: I1205 06:46:31.031785 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jzx4s_must-gather-42phd_71887dbc-5af0-4607-acfd-ee6e12e90d68/gather/0.log" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.078645 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jzx4s/must-gather-42phd"] Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.079201 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-jzx4s/must-gather-42phd" podUID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerName="copy" containerID="cri-o://af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280" gracePeriod=2 Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.085598 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jzx4s/must-gather-42phd"] Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.453472 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jzx4s_must-gather-42phd_71887dbc-5af0-4607-acfd-ee6e12e90d68/copy/0.log" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.454216 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.574100 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9km8h\" (UniqueName: \"kubernetes.io/projected/71887dbc-5af0-4607-acfd-ee6e12e90d68-kube-api-access-9km8h\") pod \"71887dbc-5af0-4607-acfd-ee6e12e90d68\" (UID: \"71887dbc-5af0-4607-acfd-ee6e12e90d68\") " Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.574361 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/71887dbc-5af0-4607-acfd-ee6e12e90d68-must-gather-output\") pod \"71887dbc-5af0-4607-acfd-ee6e12e90d68\" (UID: \"71887dbc-5af0-4607-acfd-ee6e12e90d68\") " Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.584936 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71887dbc-5af0-4607-acfd-ee6e12e90d68-kube-api-access-9km8h" (OuterVolumeSpecName: "kube-api-access-9km8h") pod "71887dbc-5af0-4607-acfd-ee6e12e90d68" (UID: "71887dbc-5af0-4607-acfd-ee6e12e90d68"). InnerVolumeSpecName "kube-api-access-9km8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.677212 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9km8h\" (UniqueName: \"kubernetes.io/projected/71887dbc-5af0-4607-acfd-ee6e12e90d68-kube-api-access-9km8h\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.703966 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71887dbc-5af0-4607-acfd-ee6e12e90d68-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "71887dbc-5af0-4607-acfd-ee6e12e90d68" (UID: "71887dbc-5af0-4607-acfd-ee6e12e90d68"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.779696 4652 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/71887dbc-5af0-4607-acfd-ee6e12e90d68-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.881793 4652 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jzx4s_must-gather-42phd_71887dbc-5af0-4607-acfd-ee6e12e90d68/copy/0.log" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.882261 4652 generic.go:334] "Generic (PLEG): container finished" podID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerID="af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280" exitCode=143 Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.882321 4652 scope.go:117] "RemoveContainer" containerID="af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.882318 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jzx4s/must-gather-42phd" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.900694 4652 scope.go:117] "RemoveContainer" containerID="0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.977938 4652 scope.go:117] "RemoveContainer" containerID="af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280" Dec 05 06:46:39 crc kubenswrapper[4652]: E1205 06:46:39.978916 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280\": container with ID starting with af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280 not found: ID does not exist" containerID="af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.978944 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280"} err="failed to get container status \"af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280\": rpc error: code = NotFound desc = could not find container \"af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280\": container with ID starting with af42ae29cf5b2eb4e94f978e92fbc0025617f658aa6ebe622c2d6d8dbaa1a280 not found: ID does not exist" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.978964 4652 scope.go:117] "RemoveContainer" containerID="0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47" Dec 05 06:46:39 crc kubenswrapper[4652]: E1205 06:46:39.983503 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47\": container with ID starting with 0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47 not found: ID does not exist" containerID="0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47" Dec 05 06:46:39 crc kubenswrapper[4652]: I1205 06:46:39.983528 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47"} err="failed to get container status 
\"0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47\": rpc error: code = NotFound desc = could not find container \"0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47\": container with ID starting with 0d7d89b4e48a4fc67af3b4ceacd36caf646561ec3004aafb2800e38d1d224a47 not found: ID does not exist" Dec 05 06:46:40 crc kubenswrapper[4652]: I1205 06:46:40.137165 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71887dbc-5af0-4607-acfd-ee6e12e90d68" path="/var/lib/kubelet/pods/71887dbc-5af0-4607-acfd-ee6e12e90d68/volumes" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.497035 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hb4xq"] Dec 05 06:47:00 crc kubenswrapper[4652]: E1205 06:47:00.497860 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb52b1bb-39cf-45c9-98db-47579f56e584" containerName="collect-profiles" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.497875 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb52b1bb-39cf-45c9-98db-47579f56e584" containerName="collect-profiles" Dec 05 06:47:00 crc kubenswrapper[4652]: E1205 06:47:00.497899 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerName="gather" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.497905 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerName="gather" Dec 05 06:47:00 crc kubenswrapper[4652]: E1205 06:47:00.497934 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerName="copy" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.497942 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerName="copy" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.498225 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb52b1bb-39cf-45c9-98db-47579f56e584" containerName="collect-profiles" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.498245 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerName="gather" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.498257 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="71887dbc-5af0-4607-acfd-ee6e12e90d68" containerName="copy" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.499804 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.504467 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hb4xq"] Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.621197 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnh4c\" (UniqueName: \"kubernetes.io/projected/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-kube-api-access-vnh4c\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.621262 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-catalog-content\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.621695 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-utilities\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.723690 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-utilities\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.723788 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnh4c\" (UniqueName: \"kubernetes.io/projected/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-kube-api-access-vnh4c\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.723822 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-catalog-content\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.724121 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-utilities\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.724262 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-catalog-content\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.741121 4652 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vnh4c\" (UniqueName: \"kubernetes.io/projected/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-kube-api-access-vnh4c\") pod \"certified-operators-hb4xq\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:00 crc kubenswrapper[4652]: I1205 06:47:00.822721 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:01 crc kubenswrapper[4652]: I1205 06:47:01.230611 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hb4xq"] Dec 05 06:47:02 crc kubenswrapper[4652]: I1205 06:47:02.106786 4652 generic.go:334] "Generic (PLEG): container finished" podID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerID="30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958" exitCode=0 Dec 05 06:47:02 crc kubenswrapper[4652]: I1205 06:47:02.106832 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xq" event={"ID":"ce4a89c2-5fab-4e43-9a60-fa743860bdbc","Type":"ContainerDied","Data":"30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958"} Dec 05 06:47:02 crc kubenswrapper[4652]: I1205 06:47:02.107013 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xq" event={"ID":"ce4a89c2-5fab-4e43-9a60-fa743860bdbc","Type":"ContainerStarted","Data":"6cd32f6c06793d59e837038f17f780b358ca0218e08e1c52efdd40f1eb4179af"} Dec 05 06:47:03 crc kubenswrapper[4652]: I1205 06:47:03.118477 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xq" event={"ID":"ce4a89c2-5fab-4e43-9a60-fa743860bdbc","Type":"ContainerStarted","Data":"0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af"} Dec 05 06:47:04 crc kubenswrapper[4652]: I1205 06:47:04.128037 4652 generic.go:334] "Generic (PLEG): container finished" podID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerID="0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af" exitCode=0 Dec 05 06:47:04 crc kubenswrapper[4652]: I1205 06:47:04.134092 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xq" event={"ID":"ce4a89c2-5fab-4e43-9a60-fa743860bdbc","Type":"ContainerDied","Data":"0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af"} Dec 05 06:47:05 crc kubenswrapper[4652]: I1205 06:47:05.143339 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xq" event={"ID":"ce4a89c2-5fab-4e43-9a60-fa743860bdbc","Type":"ContainerStarted","Data":"b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee"} Dec 05 06:47:05 crc kubenswrapper[4652]: I1205 06:47:05.170390 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hb4xq" podStartSLOduration=2.627077002 podStartE2EDuration="5.170373628s" podCreationTimestamp="2025-12-05 06:47:00 +0000 UTC" firstStartedPulling="2025-12-05 06:47:02.11315637 +0000 UTC m=+4824.349886636" lastFinishedPulling="2025-12-05 06:47:04.656452995 +0000 UTC m=+4826.893183262" observedRunningTime="2025-12-05 06:47:05.16223714 +0000 UTC m=+4827.398967407" watchObservedRunningTime="2025-12-05 06:47:05.170373628 +0000 UTC m=+4827.407103895" Dec 05 06:47:10 crc kubenswrapper[4652]: I1205 06:47:10.823494 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:10 crc kubenswrapper[4652]: I1205 06:47:10.823845 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:10 crc kubenswrapper[4652]: I1205 06:47:10.860396 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:11 crc kubenswrapper[4652]: I1205 06:47:11.235477 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:11 crc kubenswrapper[4652]: I1205 06:47:11.272488 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hb4xq"] Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.206724 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hb4xq" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerName="registry-server" containerID="cri-o://b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee" gracePeriod=2 Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.588182 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.670278 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-utilities\") pod \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.670362 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-catalog-content\") pod \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.670446 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnh4c\" (UniqueName: \"kubernetes.io/projected/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-kube-api-access-vnh4c\") pod \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\" (UID: \"ce4a89c2-5fab-4e43-9a60-fa743860bdbc\") " Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.672477 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-utilities" (OuterVolumeSpecName: "utilities") pod "ce4a89c2-5fab-4e43-9a60-fa743860bdbc" (UID: "ce4a89c2-5fab-4e43-9a60-fa743860bdbc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.676394 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-kube-api-access-vnh4c" (OuterVolumeSpecName: "kube-api-access-vnh4c") pod "ce4a89c2-5fab-4e43-9a60-fa743860bdbc" (UID: "ce4a89c2-5fab-4e43-9a60-fa743860bdbc"). InnerVolumeSpecName "kube-api-access-vnh4c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.715797 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce4a89c2-5fab-4e43-9a60-fa743860bdbc" (UID: "ce4a89c2-5fab-4e43-9a60-fa743860bdbc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.773311 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.773339 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnh4c\" (UniqueName: \"kubernetes.io/projected/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-kube-api-access-vnh4c\") on node \"crc\" DevicePath \"\"" Dec 05 06:47:13 crc kubenswrapper[4652]: I1205 06:47:13.773351 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce4a89c2-5fab-4e43-9a60-fa743860bdbc-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.215628 4652 generic.go:334] "Generic (PLEG): container finished" podID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerID="b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee" exitCode=0 Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.215667 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xq" event={"ID":"ce4a89c2-5fab-4e43-9a60-fa743860bdbc","Type":"ContainerDied","Data":"b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee"} Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.215691 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hb4xq" event={"ID":"ce4a89c2-5fab-4e43-9a60-fa743860bdbc","Type":"ContainerDied","Data":"6cd32f6c06793d59e837038f17f780b358ca0218e08e1c52efdd40f1eb4179af"} Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.215710 4652 scope.go:117] "RemoveContainer" containerID="b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.215805 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hb4xq" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.238293 4652 scope.go:117] "RemoveContainer" containerID="0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.238604 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hb4xq"] Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.246245 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hb4xq"] Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.255280 4652 scope.go:117] "RemoveContainer" containerID="30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.291001 4652 scope.go:117] "RemoveContainer" containerID="b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee" Dec 05 06:47:14 crc kubenswrapper[4652]: E1205 06:47:14.291433 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee\": container with ID starting with b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee not found: ID does not exist" containerID="b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.291471 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee"} err="failed to get container status \"b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee\": rpc error: code = NotFound desc = could not find container \"b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee\": container with ID starting with b7f61827539e07e49cf9b7209ad36513e698491271fe1aff132d460510b347ee not found: ID does not exist" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.291495 4652 scope.go:117] "RemoveContainer" containerID="0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af" Dec 05 06:47:14 crc kubenswrapper[4652]: E1205 06:47:14.291868 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af\": container with ID starting with 0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af not found: ID does not exist" containerID="0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.291892 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af"} err="failed to get container status \"0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af\": rpc error: code = NotFound desc = could not find container \"0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af\": container with ID starting with 0e68e9efe30a46e31e1fc35fd21e44ac9cf923b995eb37d833f0ef4a9d7915af not found: ID does not exist" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.291906 4652 scope.go:117] "RemoveContainer" containerID="30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958" Dec 05 06:47:14 crc kubenswrapper[4652]: E1205 06:47:14.292220 4652 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958\": container with ID starting with 30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958 not found: ID does not exist" containerID="30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958" Dec 05 06:47:14 crc kubenswrapper[4652]: I1205 06:47:14.292241 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958"} err="failed to get container status \"30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958\": rpc error: code = NotFound desc = could not find container \"30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958\": container with ID starting with 30afd387c4333f1ee32623fadffd6a15c5db153b92291c18deea486001469958 not found: ID does not exist" Dec 05 06:47:16 crc kubenswrapper[4652]: I1205 06:47:16.137297 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" path="/var/lib/kubelet/pods/ce4a89c2-5fab-4e43-9a60-fa743860bdbc/volumes" Dec 05 06:47:34 crc kubenswrapper[4652]: I1205 06:47:34.150421 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:47:34 crc kubenswrapper[4652]: I1205 06:47:34.150863 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:48:04 crc kubenswrapper[4652]: I1205 06:48:04.150238 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:48:04 crc kubenswrapper[4652]: I1205 06:48:04.150600 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:48:07 crc kubenswrapper[4652]: I1205 06:48:07.937588 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rqmvk"] Dec 05 06:48:07 crc kubenswrapper[4652]: E1205 06:48:07.938351 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerName="extract-content" Dec 05 06:48:07 crc kubenswrapper[4652]: I1205 06:48:07.938365 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerName="extract-content" Dec 05 06:48:07 crc kubenswrapper[4652]: E1205 06:48:07.938375 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerName="extract-utilities" Dec 05 06:48:07 crc kubenswrapper[4652]: I1205 06:48:07.938381 4652 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerName="extract-utilities" Dec 05 06:48:07 crc kubenswrapper[4652]: E1205 06:48:07.938417 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerName="registry-server" Dec 05 06:48:07 crc kubenswrapper[4652]: I1205 06:48:07.938422 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerName="registry-server" Dec 05 06:48:07 crc kubenswrapper[4652]: I1205 06:48:07.938645 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce4a89c2-5fab-4e43-9a60-fa743860bdbc" containerName="registry-server" Dec 05 06:48:07 crc kubenswrapper[4652]: I1205 06:48:07.940132 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:07 crc kubenswrapper[4652]: I1205 06:48:07.946916 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rqmvk"] Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.017969 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-catalog-content\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.018050 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5scs\" (UniqueName: \"kubernetes.io/projected/b9e6bff5-39b3-4221-9947-8cc9c384fe62-kube-api-access-c5scs\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.018165 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-utilities\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.120593 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-utilities\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.120861 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-catalog-content\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.120900 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5scs\" (UniqueName: \"kubernetes.io/projected/b9e6bff5-39b3-4221-9947-8cc9c384fe62-kube-api-access-c5scs\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.121162 
4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-utilities\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.121319 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-catalog-content\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.136973 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5scs\" (UniqueName: \"kubernetes.io/projected/b9e6bff5-39b3-4221-9947-8cc9c384fe62-kube-api-access-c5scs\") pod \"redhat-marketplace-rqmvk\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.257582 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:08 crc kubenswrapper[4652]: I1205 06:48:08.652060 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rqmvk"] Dec 05 06:48:09 crc kubenswrapper[4652]: I1205 06:48:09.631118 4652 generic.go:334] "Generic (PLEG): container finished" podID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerID="bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c" exitCode=0 Dec 05 06:48:09 crc kubenswrapper[4652]: I1205 06:48:09.631161 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rqmvk" event={"ID":"b9e6bff5-39b3-4221-9947-8cc9c384fe62","Type":"ContainerDied","Data":"bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c"} Dec 05 06:48:09 crc kubenswrapper[4652]: I1205 06:48:09.631696 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rqmvk" event={"ID":"b9e6bff5-39b3-4221-9947-8cc9c384fe62","Type":"ContainerStarted","Data":"38a96379f468b9f8b6d30c2010f2d2f4efa7519695b780a54a23e4e301c200a8"} Dec 05 06:48:09 crc kubenswrapper[4652]: I1205 06:48:09.633270 4652 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:48:10 crc kubenswrapper[4652]: I1205 06:48:10.641640 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rqmvk" event={"ID":"b9e6bff5-39b3-4221-9947-8cc9c384fe62","Type":"ContainerStarted","Data":"bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d"} Dec 05 06:48:11 crc kubenswrapper[4652]: I1205 06:48:11.651689 4652 generic.go:334] "Generic (PLEG): container finished" podID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerID="bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d" exitCode=0 Dec 05 06:48:11 crc kubenswrapper[4652]: I1205 06:48:11.651905 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rqmvk" event={"ID":"b9e6bff5-39b3-4221-9947-8cc9c384fe62","Type":"ContainerDied","Data":"bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d"} Dec 05 06:48:12 crc kubenswrapper[4652]: I1205 06:48:12.661956 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-rqmvk" event={"ID":"b9e6bff5-39b3-4221-9947-8cc9c384fe62","Type":"ContainerStarted","Data":"6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677"} Dec 05 06:48:18 crc kubenswrapper[4652]: I1205 06:48:18.257675 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:18 crc kubenswrapper[4652]: I1205 06:48:18.258231 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:18 crc kubenswrapper[4652]: I1205 06:48:18.294330 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:18 crc kubenswrapper[4652]: I1205 06:48:18.312347 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rqmvk" podStartSLOduration=8.796073923 podStartE2EDuration="11.312332957s" podCreationTimestamp="2025-12-05 06:48:07 +0000 UTC" firstStartedPulling="2025-12-05 06:48:09.63307227 +0000 UTC m=+4891.869802537" lastFinishedPulling="2025-12-05 06:48:12.149331304 +0000 UTC m=+4894.386061571" observedRunningTime="2025-12-05 06:48:12.678822346 +0000 UTC m=+4894.915552613" watchObservedRunningTime="2025-12-05 06:48:18.312332957 +0000 UTC m=+4900.549063225" Dec 05 06:48:18 crc kubenswrapper[4652]: I1205 06:48:18.742396 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:18 crc kubenswrapper[4652]: I1205 06:48:18.779776 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rqmvk"] Dec 05 06:48:20 crc kubenswrapper[4652]: I1205 06:48:20.724767 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rqmvk" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerName="registry-server" containerID="cri-o://6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677" gracePeriod=2 Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.117655 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.193726 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5scs\" (UniqueName: \"kubernetes.io/projected/b9e6bff5-39b3-4221-9947-8cc9c384fe62-kube-api-access-c5scs\") pod \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.193824 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-catalog-content\") pod \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.193888 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-utilities\") pod \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\" (UID: \"b9e6bff5-39b3-4221-9947-8cc9c384fe62\") " Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.194997 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-utilities" (OuterVolumeSpecName: "utilities") pod "b9e6bff5-39b3-4221-9947-8cc9c384fe62" (UID: "b9e6bff5-39b3-4221-9947-8cc9c384fe62"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.198107 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9e6bff5-39b3-4221-9947-8cc9c384fe62-kube-api-access-c5scs" (OuterVolumeSpecName: "kube-api-access-c5scs") pod "b9e6bff5-39b3-4221-9947-8cc9c384fe62" (UID: "b9e6bff5-39b3-4221-9947-8cc9c384fe62"). InnerVolumeSpecName "kube-api-access-c5scs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.208492 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b9e6bff5-39b3-4221-9947-8cc9c384fe62" (UID: "b9e6bff5-39b3-4221-9947-8cc9c384fe62"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.296352 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5scs\" (UniqueName: \"kubernetes.io/projected/b9e6bff5-39b3-4221-9947-8cc9c384fe62-kube-api-access-c5scs\") on node \"crc\" DevicePath \"\"" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.296384 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.296395 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9e6bff5-39b3-4221-9947-8cc9c384fe62-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.734126 4652 generic.go:334] "Generic (PLEG): container finished" podID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerID="6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677" exitCode=0 Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.734167 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rqmvk" event={"ID":"b9e6bff5-39b3-4221-9947-8cc9c384fe62","Type":"ContainerDied","Data":"6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677"} Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.734175 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rqmvk" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.734194 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rqmvk" event={"ID":"b9e6bff5-39b3-4221-9947-8cc9c384fe62","Type":"ContainerDied","Data":"38a96379f468b9f8b6d30c2010f2d2f4efa7519695b780a54a23e4e301c200a8"} Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.734212 4652 scope.go:117] "RemoveContainer" containerID="6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.750045 4652 scope.go:117] "RemoveContainer" containerID="bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d" Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.759189 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rqmvk"] Dec 05 06:48:21 crc kubenswrapper[4652]: I1205 06:48:21.766741 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rqmvk"] Dec 05 06:48:22 crc kubenswrapper[4652]: I1205 06:48:22.125175 4652 scope.go:117] "RemoveContainer" containerID="bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c" Dec 05 06:48:22 crc kubenswrapper[4652]: I1205 06:48:22.137747 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" path="/var/lib/kubelet/pods/b9e6bff5-39b3-4221-9947-8cc9c384fe62/volumes" Dec 05 06:48:22 crc kubenswrapper[4652]: I1205 06:48:22.154849 4652 scope.go:117] "RemoveContainer" containerID="6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677" Dec 05 06:48:22 crc kubenswrapper[4652]: E1205 06:48:22.155263 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677\": container with ID 
starting with 6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677 not found: ID does not exist" containerID="6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677" Dec 05 06:48:22 crc kubenswrapper[4652]: I1205 06:48:22.155300 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677"} err="failed to get container status \"6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677\": rpc error: code = NotFound desc = could not find container \"6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677\": container with ID starting with 6b8c925fe50c84c49dc55cea232d3cdee47799610f0b5247b3a1f0a4ca31d677 not found: ID does not exist" Dec 05 06:48:22 crc kubenswrapper[4652]: I1205 06:48:22.155323 4652 scope.go:117] "RemoveContainer" containerID="bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d" Dec 05 06:48:22 crc kubenswrapper[4652]: E1205 06:48:22.155632 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d\": container with ID starting with bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d not found: ID does not exist" containerID="bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d" Dec 05 06:48:22 crc kubenswrapper[4652]: I1205 06:48:22.155671 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d"} err="failed to get container status \"bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d\": rpc error: code = NotFound desc = could not find container \"bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d\": container with ID starting with bda05e28d28f846ec571d96e36cf42d5142e3266d7f735bc14fb11c494a32f9d not found: ID does not exist" Dec 05 06:48:22 crc kubenswrapper[4652]: I1205 06:48:22.155701 4652 scope.go:117] "RemoveContainer" containerID="bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c" Dec 05 06:48:22 crc kubenswrapper[4652]: E1205 06:48:22.156102 4652 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c\": container with ID starting with bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c not found: ID does not exist" containerID="bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c" Dec 05 06:48:22 crc kubenswrapper[4652]: I1205 06:48:22.156131 4652 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c"} err="failed to get container status \"bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c\": rpc error: code = NotFound desc = could not find container \"bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c\": container with ID starting with bfc7e8ecf225da0055f6245ed396d79a9d0988dfc889220d779665eb9119380c not found: ID does not exist" Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.150698 4652 patch_prober.go:28] interesting pod/machine-config-daemon-s4t24 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.151301 4652 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.151336 4652 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.152147 4652 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8"} pod="openshift-machine-config-operator/machine-config-daemon-s4t24" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.152260 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerName="machine-config-daemon" containerID="cri-o://3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" gracePeriod=600 Dec 05 06:48:34 crc kubenswrapper[4652]: E1205 06:48:34.269254 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.847653 4652 generic.go:334] "Generic (PLEG): container finished" podID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" exitCode=0 Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.847694 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" event={"ID":"0331197d-08f0-4dec-8d8a-72e6019bd2eb","Type":"ContainerDied","Data":"3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8"} Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.847727 4652 scope.go:117] "RemoveContainer" containerID="beb45db72cad990812bece4f42e33b4e0ed1610f1d17c0ba8eb299a6992d0374" Dec 05 06:48:34 crc kubenswrapper[4652]: I1205 06:48:34.848295 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:48:34 crc kubenswrapper[4652]: E1205 06:48:34.848690 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:48:42 crc kubenswrapper[4652]: I1205 06:48:42.721243 4652 scope.go:117] "RemoveContainer" 
containerID="1fa2e7128d30e4e90fb1ee74143fd2596274bd3705ec370f75d0e601e2e669fa" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.700299 4652 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bp5fk"] Dec 05 06:48:48 crc kubenswrapper[4652]: E1205 06:48:48.701093 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerName="registry-server" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.701109 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerName="registry-server" Dec 05 06:48:48 crc kubenswrapper[4652]: E1205 06:48:48.701129 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerName="extract-utilities" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.701137 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerName="extract-utilities" Dec 05 06:48:48 crc kubenswrapper[4652]: E1205 06:48:48.701148 4652 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerName="extract-content" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.701154 4652 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerName="extract-content" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.701384 4652 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9e6bff5-39b3-4221-9947-8cc9c384fe62" containerName="registry-server" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.702758 4652 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.715681 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bp5fk"] Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.872974 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtbl4\" (UniqueName: \"kubernetes.io/projected/99286a43-7bef-45b4-bb57-6b0c669957d5-kube-api-access-qtbl4\") pod \"redhat-operators-bp5fk\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.873116 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-utilities\") pod \"redhat-operators-bp5fk\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.873431 4652 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-catalog-content\") pod \"redhat-operators-bp5fk\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.975609 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-utilities\") pod \"redhat-operators-bp5fk\" (UID: 
\"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.975821 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-catalog-content\") pod \"redhat-operators-bp5fk\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.975925 4652 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtbl4\" (UniqueName: \"kubernetes.io/projected/99286a43-7bef-45b4-bb57-6b0c669957d5-kube-api-access-qtbl4\") pod \"redhat-operators-bp5fk\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.976047 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-utilities\") pod \"redhat-operators-bp5fk\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:48 crc kubenswrapper[4652]: I1205 06:48:48.976227 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-catalog-content\") pod \"redhat-operators-bp5fk\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:49 crc kubenswrapper[4652]: I1205 06:48:49.007851 4652 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtbl4\" (UniqueName: \"kubernetes.io/projected/99286a43-7bef-45b4-bb57-6b0c669957d5-kube-api-access-qtbl4\") pod \"redhat-operators-bp5fk\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:49 crc kubenswrapper[4652]: I1205 06:48:49.018220 4652 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:49 crc kubenswrapper[4652]: I1205 06:48:49.125633 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:48:49 crc kubenswrapper[4652]: E1205 06:48:49.126190 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:48:49 crc kubenswrapper[4652]: I1205 06:48:49.423270 4652 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bp5fk"] Dec 05 06:48:49 crc kubenswrapper[4652]: I1205 06:48:49.966852 4652 generic.go:334] "Generic (PLEG): container finished" podID="99286a43-7bef-45b4-bb57-6b0c669957d5" containerID="66f5d562160f8d941a5a71b2b1b451abf949e5a136ff542c766ab7c3c261e76f" exitCode=0 Dec 05 06:48:49 crc kubenswrapper[4652]: I1205 06:48:49.966901 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp5fk" event={"ID":"99286a43-7bef-45b4-bb57-6b0c669957d5","Type":"ContainerDied","Data":"66f5d562160f8d941a5a71b2b1b451abf949e5a136ff542c766ab7c3c261e76f"} Dec 05 06:48:49 crc kubenswrapper[4652]: I1205 06:48:49.967102 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp5fk" event={"ID":"99286a43-7bef-45b4-bb57-6b0c669957d5","Type":"ContainerStarted","Data":"673e9030185c552959f105999c6fb878dee851befa04357a13261d56d7e83d65"} Dec 05 06:48:50 crc kubenswrapper[4652]: I1205 06:48:50.977272 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp5fk" event={"ID":"99286a43-7bef-45b4-bb57-6b0c669957d5","Type":"ContainerStarted","Data":"997e5ee383b8ed0c9774914a0dce7b5a5c16951a4b802f8fa4fd02a9cc879174"} Dec 05 06:48:52 crc kubenswrapper[4652]: I1205 06:48:52.995669 4652 generic.go:334] "Generic (PLEG): container finished" podID="99286a43-7bef-45b4-bb57-6b0c669957d5" containerID="997e5ee383b8ed0c9774914a0dce7b5a5c16951a4b802f8fa4fd02a9cc879174" exitCode=0 Dec 05 06:48:52 crc kubenswrapper[4652]: I1205 06:48:52.995711 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp5fk" event={"ID":"99286a43-7bef-45b4-bb57-6b0c669957d5","Type":"ContainerDied","Data":"997e5ee383b8ed0c9774914a0dce7b5a5c16951a4b802f8fa4fd02a9cc879174"} Dec 05 06:48:54 crc kubenswrapper[4652]: I1205 06:48:54.006358 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp5fk" event={"ID":"99286a43-7bef-45b4-bb57-6b0c669957d5","Type":"ContainerStarted","Data":"ce832abf3620b59a1634083ff68eed713ea94fe9654ad719ea26a46b055cb05d"} Dec 05 06:48:54 crc kubenswrapper[4652]: I1205 06:48:54.024614 4652 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bp5fk" podStartSLOduration=2.501018919 podStartE2EDuration="6.024600119s" podCreationTimestamp="2025-12-05 06:48:48 +0000 UTC" firstStartedPulling="2025-12-05 06:48:49.968887101 +0000 UTC m=+4932.205617369" lastFinishedPulling="2025-12-05 06:48:53.492468302 +0000 UTC m=+4935.729198569" observedRunningTime="2025-12-05 06:48:54.020181982 +0000 
UTC m=+4936.256912249" watchObservedRunningTime="2025-12-05 06:48:54.024600119 +0000 UTC m=+4936.261330386" Dec 05 06:48:59 crc kubenswrapper[4652]: I1205 06:48:59.018943 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:59 crc kubenswrapper[4652]: I1205 06:48:59.019346 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:59 crc kubenswrapper[4652]: I1205 06:48:59.053526 4652 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:48:59 crc kubenswrapper[4652]: I1205 06:48:59.086710 4652 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:49:02 crc kubenswrapper[4652]: I1205 06:49:02.286527 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bp5fk"] Dec 05 06:49:02 crc kubenswrapper[4652]: I1205 06:49:02.287507 4652 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bp5fk" podUID="99286a43-7bef-45b4-bb57-6b0c669957d5" containerName="registry-server" containerID="cri-o://ce832abf3620b59a1634083ff68eed713ea94fe9654ad719ea26a46b055cb05d" gracePeriod=2 Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.078742 4652 generic.go:334] "Generic (PLEG): container finished" podID="99286a43-7bef-45b4-bb57-6b0c669957d5" containerID="ce832abf3620b59a1634083ff68eed713ea94fe9654ad719ea26a46b055cb05d" exitCode=0 Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.078810 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp5fk" event={"ID":"99286a43-7bef-45b4-bb57-6b0c669957d5","Type":"ContainerDied","Data":"ce832abf3620b59a1634083ff68eed713ea94fe9654ad719ea26a46b055cb05d"} Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.186479 4652 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.246676 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtbl4\" (UniqueName: \"kubernetes.io/projected/99286a43-7bef-45b4-bb57-6b0c669957d5-kube-api-access-qtbl4\") pod \"99286a43-7bef-45b4-bb57-6b0c669957d5\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.246805 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-utilities\") pod \"99286a43-7bef-45b4-bb57-6b0c669957d5\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.246833 4652 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-catalog-content\") pod \"99286a43-7bef-45b4-bb57-6b0c669957d5\" (UID: \"99286a43-7bef-45b4-bb57-6b0c669957d5\") " Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.247452 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-utilities" (OuterVolumeSpecName: "utilities") pod "99286a43-7bef-45b4-bb57-6b0c669957d5" (UID: "99286a43-7bef-45b4-bb57-6b0c669957d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.306155 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99286a43-7bef-45b4-bb57-6b0c669957d5-kube-api-access-qtbl4" (OuterVolumeSpecName: "kube-api-access-qtbl4") pod "99286a43-7bef-45b4-bb57-6b0c669957d5" (UID: "99286a43-7bef-45b4-bb57-6b0c669957d5"). InnerVolumeSpecName "kube-api-access-qtbl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.317853 4652 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99286a43-7bef-45b4-bb57-6b0c669957d5" (UID: "99286a43-7bef-45b4-bb57-6b0c669957d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.349100 4652 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtbl4\" (UniqueName: \"kubernetes.io/projected/99286a43-7bef-45b4-bb57-6b0c669957d5-kube-api-access-qtbl4\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.349133 4652 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:03 crc kubenswrapper[4652]: I1205 06:49:03.349143 4652 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99286a43-7bef-45b4-bb57-6b0c669957d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:49:04 crc kubenswrapper[4652]: I1205 06:49:04.089639 4652 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bp5fk" event={"ID":"99286a43-7bef-45b4-bb57-6b0c669957d5","Type":"ContainerDied","Data":"673e9030185c552959f105999c6fb878dee851befa04357a13261d56d7e83d65"} Dec 05 06:49:04 crc kubenswrapper[4652]: I1205 06:49:04.089700 4652 scope.go:117] "RemoveContainer" containerID="ce832abf3620b59a1634083ff68eed713ea94fe9654ad719ea26a46b055cb05d" Dec 05 06:49:04 crc kubenswrapper[4652]: I1205 06:49:04.089991 4652 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bp5fk" Dec 05 06:49:04 crc kubenswrapper[4652]: I1205 06:49:04.110612 4652 scope.go:117] "RemoveContainer" containerID="997e5ee383b8ed0c9774914a0dce7b5a5c16951a4b802f8fa4fd02a9cc879174" Dec 05 06:49:04 crc kubenswrapper[4652]: I1205 06:49:04.122200 4652 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bp5fk"] Dec 05 06:49:04 crc kubenswrapper[4652]: I1205 06:49:04.126170 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:49:04 crc kubenswrapper[4652]: E1205 06:49:04.126446 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:49:04 crc kubenswrapper[4652]: I1205 06:49:04.134676 4652 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bp5fk"] Dec 05 06:49:04 crc kubenswrapper[4652]: I1205 06:49:04.149591 4652 scope.go:117] "RemoveContainer" containerID="66f5d562160f8d941a5a71b2b1b451abf949e5a136ff542c766ab7c3c261e76f" Dec 05 06:49:06 crc kubenswrapper[4652]: I1205 06:49:06.134479 4652 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99286a43-7bef-45b4-bb57-6b0c669957d5" path="/var/lib/kubelet/pods/99286a43-7bef-45b4-bb57-6b0c669957d5/volumes" Dec 05 06:49:16 crc kubenswrapper[4652]: I1205 06:49:16.126852 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:49:16 crc kubenswrapper[4652]: E1205 06:49:16.127689 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:49:24 crc kubenswrapper[4652]: I1205 06:49:24.914616 4652 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-9f8cfd587-c4hb6" podUID="ee6b7b3b-6c37-4d26-8bc6-1f96c94031ef" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 05 06:49:30 crc kubenswrapper[4652]: I1205 06:49:30.127304 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:49:30 crc kubenswrapper[4652]: E1205 06:49:30.128481 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:49:42 crc kubenswrapper[4652]: I1205 06:49:42.773549 4652 scope.go:117] "RemoveContainer" containerID="387f9ae3bec9f47cbabcde58b8ed37bc8dc761e94d80268a5bc9ace71d6b3eee" Dec 05 06:49:45 crc kubenswrapper[4652]: I1205 06:49:45.125765 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:49:45 crc kubenswrapper[4652]: E1205 06:49:45.127107 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:49:56 crc kubenswrapper[4652]: I1205 06:49:56.125717 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:49:56 crc kubenswrapper[4652]: E1205 06:49:56.126389 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:50:10 crc kubenswrapper[4652]: I1205 06:50:10.125812 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:50:10 crc kubenswrapper[4652]: E1205 06:50:10.126803 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:50:24 crc kubenswrapper[4652]: I1205 06:50:24.126938 4652 
scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:50:24 crc kubenswrapper[4652]: E1205 06:50:24.127692 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:50:39 crc kubenswrapper[4652]: I1205 06:50:39.125141 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:50:39 crc kubenswrapper[4652]: E1205 06:50:39.125668 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:50:52 crc kubenswrapper[4652]: I1205 06:50:52.128233 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:50:52 crc kubenswrapper[4652]: E1205 06:50:52.129912 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:51:05 crc kubenswrapper[4652]: I1205 06:51:05.126542 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:51:05 crc kubenswrapper[4652]: E1205 06:51:05.127469 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:51:20 crc kubenswrapper[4652]: I1205 06:51:20.127265 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:51:20 crc kubenswrapper[4652]: E1205 06:51:20.128880 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb" Dec 05 06:51:35 crc kubenswrapper[4652]: I1205 06:51:35.126218 4652 scope.go:117] "RemoveContainer" containerID="3ba7821508aa3549e581c742e430630319fc6001faf337c14a17857606d026f8" Dec 05 06:51:35 crc kubenswrapper[4652]: E1205 06:51:35.127130 4652 
Dec 05 06:51:35 crc kubenswrapper[4652]: E1205 06:51:35.127130 4652 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-s4t24_openshift-machine-config-operator(0331197d-08f0-4dec-8d8a-72e6019bd2eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-s4t24" podUID="0331197d-08f0-4dec-8d8a-72e6019bd2eb"